//
// MASService.h
// MASFoundation
//
// Copyright (c) 2016 CA. All rights reserved.
//
// This software may be modified and distributed under the terms
// of the MIT license. See the LICENSE file for details.
//
@import Foundation;
/**
* The enumerated statuses for the lifecycle of the micro services.
*/
typedef NS_ENUM(NSInteger, MASServiceLifecycleStatus)
{
MASServiceLifecycleStatusUnknown = -1,
MASServiceLifecycleStatusInitialized,
MASServiceLifecycleStatusLoaded,
MASServiceLifecycleStatusWillStart,
MASServiceLifecycleStatusDidStart,
MASServiceLifecycleStatusWillStop,
MASServiceLifecycleStatusDidStop,
MASServiceLifecycleStatusStateCount
};
/**
* MASService is the base class for all services that are run during
* the internal service lifecycle.
*/
@interface MASService : NSObject
///--------------------------------------
/// @name Properties
///-------------------------------------
# pragma mark - Properties
/**
* The current MASServiceLifecycleStatus of the MASService.
*/
@property (nonatomic, assign, readonly) MASServiceLifecycleStatus lifecycleStatus;
///--------------------------------------
/// @name Shared Service
///-------------------------------------
# pragma mark - Shared Service
/**
* Retrieve the shared MASService singleton.
*
* Note, subclasses should override this version of the method.
*
* @return Returns the shared MASService singleton.
*/
+ (instancetype _Nullable)sharedService;
///--------------------------------------
/// @name Lifecycle
///-------------------------------------
# pragma mark - Lifecycle
/**
* Retrieves the UUID assigned to a particular MASService subclass. All subclasses MUST
* implement this method and return a unique value. If a subclass does not, the super
* version will return nil and the service will be rejected for membership in the internal
* MASService lifecycle process.
*
* @return Return the UUID assigned to the MASService subclass.
*/
+ (NSString *_Nullable)serviceUUID;
/**
* The lifecycle method to indicate the service should perform any processes or
* acquire any resources it requires at the MASServiceLifecycleStatusLoaded state.
*
* Subclasses should override this method for their specific requirements.
*/
- (void)serviceDidLoad;
/**
* The lifecycle method to indicate the service should perform any processes or
* acquire any resources BEFORE it enters the MASServiceLifecycleStatusWillStart state.
*
* Subclasses should override this method for their specific requirements.
* They should call the super implementation at the start of their override.
*/
- (void)serviceWillStart;
/**
* The lifecycle method to indicate the service should perform any processes or
* acquire any resources at the MASServiceLifecycleStatusDidStart state.
*
* Subclasses should override this method for their specific requirements.
* They should call the super implementation at the start of their override.
*/
- (void)serviceDidStart;
/**
* The lifecycle method to indicate the service should perform any processes or
* release any resources BEFORE it enters the MASServiceLifecycleStatusWillStop state.
*
* Subclasses should override this method for their specific requirements.
* They should call the super implementation at the start of their override.
*/
- (void)serviceWillStop;
/**
* The lifecycle method to indicate the service should perform any processes or
* release any resources at the MASServiceLifecycleStatusDidStop state.
*
* Subclasses should override this method for their specific requirements.
* They should call the super implementation at the start of their override.
*/
- (void)serviceDidStop;
/**
* The lifecycle method to indicate the service should perform any processes or
* release any resources to bring the service to a default installation state.
*
* Subclasses should override this method for their specific requirements.
* They should call the super implementation at the start of their override.
*/
- (void)serviceDidReset;
///--------------------------------------
/// @name Lifecycle Status
///-------------------------------------
# pragma mark - Lifecycle Status
/**
* Retrieve a human readable string value for the current MASServiceLifecycleStatus.
*
* @return Returns the MASServiceLifecycleStatus in a human readable string.
*/
- (NSString *_Nonnull)lifecycleStatusAsString;
/**
* Retrieve a human readable string value for the given MASServiceLifecycleStatus.
*
* @param status The MASServiceLifecycleStatus.
* @return Returns the MASServiceLifecycleStatus in a human readable string.
*/
+ (NSString *_Nonnull)lifecycleStatusToString:(MASServiceLifecycleStatus)status;
///--------------------------------------
/// @name Subclass Registry Methods
///-------------------------------------
# pragma mark - Subclass Registry Methods
/**
An array of information about the subclasses that inherit from and are registered with the MASService class.
@return An array of subclasses of MASService
*/
+ (NSArray * _Nullable)getSubclasses;
/**
A method to register any MASService subclass with the MASServiceRegistry.
@warning Any class subclassing `MASService` MUST register through this method with its own service UUID. The `serviceUUID` MUST be unique to the service and, at the moment, MUST be registered inside `MASFoundation` to be recognized.
@param subclass Class object of the subclass
@param serviceUUID NSString value of the subclass's own unique serviceUUID that was registered in MASFoundation
*/
+ (void)registerSubclass:(Class _Nonnull)subclass serviceUUID:(NSString * _Nonnull)serviceUUID;
@end
///--------------------------------------
/// @name Subclass Protected Methods
///-------------------------------------
# pragma mark - Subclass Protected Methods
@interface MASService (SubclassingHooks)
/**
* Protected initializer to be used by subclasses. The default init of this
* object will throw an exception to prevent usage.
*
* @return Returns an instance of the MASService.
*/
- (instancetype _Nullable)initProtected;
/**
* A call for subclasses to use to indicate that an error has occurred.
*
* @param error The NSError that the service experienced.
*/
- (void)serviceDidFailWithError:(NSError *_Nullable)error;
@end
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_gdal')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_gdal')
_gdal = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_gdal', [dirname(__file__)])
except ImportError:
import _gdal
return _gdal
try:
_mod = imp.load_module('_gdal', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_gdal = swig_import_helper()
del swig_import_helper
else:
import _gdal
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
have_warned = 0
def deprecation_warn(module, sub_package=None, new_module=None):
global have_warned
if have_warned == 1:
return
have_warned = 1
if sub_package is None or sub_package == 'utils':
sub_package = 'osgeo_utils'
if new_module is None:
new_module = module
new_module = '{}.{}'.format(sub_package, new_module)
from warnings import warn
warn('{}.py was placed in a namespace, it is now available as {}' .format(module, new_module),
DeprecationWarning)
from osgeo.gdalconst import *
from osgeo import gdalconst
import sys
byteorders = {"little": "<",
"big": ">"}
array_modes = { gdalconst.GDT_Int16: ("%si2" % byteorders[sys.byteorder]),
gdalconst.GDT_UInt16: ("%su2" % byteorders[sys.byteorder]),
gdalconst.GDT_Int32: ("%si4" % byteorders[sys.byteorder]),
gdalconst.GDT_UInt32: ("%su4" % byteorders[sys.byteorder]),
gdalconst.GDT_Float32: ("%sf4" % byteorders[sys.byteorder]),
gdalconst.GDT_Float64: ("%sf8" % byteorders[sys.byteorder]),
gdalconst.GDT_CFloat32: ("%sf4" % byteorders[sys.byteorder]),
gdalconst.GDT_CFloat64: ("%sf8" % byteorders[sys.byteorder]),
gdalconst.GDT_Byte: ("%st8" % byteorders[sys.byteorder]),
}
def RGBFile2PCTFile( src_filename, dst_filename ):
src_ds = Open(src_filename)
if src_ds is None or src_ds == 'NULL':
return 1
ct = ColorTable()
err = ComputeMedianCutPCT(src_ds.GetRasterBand(1),
src_ds.GetRasterBand(2),
src_ds.GetRasterBand(3),
256, ct)
if err != 0:
return err
gtiff_driver = GetDriverByName('GTiff')
if gtiff_driver is None:
return 1
dst_ds = gtiff_driver.Create(dst_filename,
src_ds.RasterXSize, src_ds.RasterYSize)
dst_ds.GetRasterBand(1).SetRasterColorTable(ct)
err = DitherRGB2PCT(src_ds.GetRasterBand(1),
src_ds.GetRasterBand(2),
src_ds.GetRasterBand(3),
dst_ds.GetRasterBand(1),
ct)
dst_ds = None
src_ds = None
return 0
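# Illustrative sketch, not part of the GDAL API: a thin wrapper around
# RGBFile2PCTFile() above that raises instead of returning an error code.
# The helper name and any filenames passed in are assumptions, not from this module.
def _example_rgb_to_pct(src_filename, dst_filename):
    """Convert an RGB GeoTIFF to a paletted GeoTIFF, raising on failure (sketch only)."""
    err = RGBFile2PCTFile(src_filename, dst_filename)
    if err != 0:
        raise RuntimeError('RGBFile2PCTFile failed with code %d' % err)
    return dst_filename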
def listdir(path, recursionLevel = -1, options = []):
""" Iterate over a directory.
recursionLevel = -1 means unlimited level of recursion.
"""
dir = OpenDir(path, recursionLevel, options)
if not dir:
raise OSError(path + ' does not exist')
try:
while True:
entry = GetNextDirEntry(dir)
if not entry:
break
yield entry
finally:
CloseDir(dir)
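# Illustrative sketch, not part of the GDAL API: collecting entry names with the
# listdir() generator above. The 'name' attribute on the returned directory
# entries is assumed from GetNextDirEntry(); the helper name is an assumption.
def _example_list_names(path, recursion_level=1):
    """Collect directory entry names under 'path' (sketch only)."""
    names = []
    for entry in listdir(path, recursionLevel=recursion_level):
        names.append(entry.name)  # 'name' attribute assumed on the dir entry
    return names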
def GetUseExceptions(*args):
"""GetUseExceptions() -> int"""
return _gdal.GetUseExceptions(*args)
def UseExceptions(*args):
"""UseExceptions()"""
return _gdal.UseExceptions(*args)
def DontUseExceptions(*args):
"""DontUseExceptions()"""
return _gdal.DontUseExceptions(*args)
def VSIFReadL(*args):
"""VSIFReadL(unsigned int nMembSize, unsigned int nMembCount, VSILFILE fp) -> unsigned int"""
return _gdal.VSIFReadL(*args)
def VSIGetMemFileBuffer_unsafe(*args):
"""VSIGetMemFileBuffer_unsafe(char const * utf8_path)"""
return _gdal.VSIGetMemFileBuffer_unsafe(*args)
def InfoOptions(options=None, format='text', deserialize=True,
computeMinMax=False, reportHistograms=False, reportProj4=False,
stats=False, approxStats=False, computeChecksum=False,
showGCPs=True, showMetadata=True, showRAT=True, showColorTable=True,
listMDD=False, showFileList=True, allMetadata=False,
extraMDDomains=None, wktFormat=None):
""" Create a InfoOptions() object that can be passed to gdal.Info()
options can be an array of strings, a string or left empty and filled from other keywords."""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
format = 'text'
if '-json' in new_options:
format = 'json'
else:
new_options = options
if format == 'json':
new_options += ['-json']
if '-json' in new_options:
format = 'json'
if computeMinMax:
new_options += ['-mm']
if reportHistograms:
new_options += ['-hist']
if reportProj4:
new_options += ['-proj4']
if stats:
new_options += ['-stats']
if approxStats:
new_options += ['-approx_stats']
if computeChecksum:
new_options += ['-checksum']
if not showGCPs:
new_options += ['-nogcp']
if not showMetadata:
new_options += ['-nomd']
if not showRAT:
new_options += ['-norat']
if not showColorTable:
new_options += ['-noct']
if listMDD:
new_options += ['-listmdd']
if not showFileList:
new_options += ['-nofl']
if allMetadata:
new_options += ['-mdd', 'all']
if wktFormat:
new_options += ['-wkt_format', wktFormat]
if extraMDDomains is not None:
for mdd in extraMDDomains:
new_options += ['-mdd', mdd]
return (GDALInfoOptions(new_options), format, deserialize)
def Info(ds, **kwargs):
""" Return information on a dataset.
Arguments are :
ds --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.InfoOptions(), string or array of strings
other keyword arguments of gdal.InfoOptions()
If options is provided as a gdal.InfoOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, format, deserialize) = InfoOptions(**kwargs)
else:
(opts, format, deserialize) = kwargs['options']
if isinstance(ds, str):
ds = Open(ds)
ret = InfoInternal(ds, opts)
if format == 'json' and deserialize:
import json
ret = json.loads(ret)
return ret
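# Illustrative sketch, not part of the GDAL API: calling Info() with keyword
# arguments forwarded to InfoOptions(). The default filename 'example.tif' is a
# placeholder assumption, not taken from this module.
def _example_info_as_dict(filename='example.tif'):
    """Return gdal.Info() output deserialized from JSON into a dict (sketch only)."""
    # format='json' with the default deserialize=True yields a Python dict.
    return Info(filename, format='json', computeMinMax=True)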
def MultiDimInfoOptions(options=None, detailed=False, array=None, arrayoptions=None, limit=None, as_text=False):
""" Create a MultiDimInfoOptions() object that can be passed to gdal.MultiDimInfo()
options can be an array of strings, a string or left empty and filled from other keywords."""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if detailed:
new_options += ['-detailed']
if array:
new_options += ['-array', array]
if limit:
new_options += ['-limit', str(limit)]
if arrayoptions:
for option in arrayoptions:
new_options += ['-arrayoption', option]
return GDALMultiDimInfoOptions(new_options), as_text
def MultiDimInfo(ds, **kwargs):
""" Return information on a dataset.
Arguments are :
ds --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.MultiDimInfoOptions(), string or array of strings
other keyword arguments of gdal.MultiDimInfoOptions()
If options is provided as a gdal.MultiDimInfoOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
opts, as_text = MultiDimInfoOptions(**kwargs)
else:
opts = kwargs['options']
as_text = True
if isinstance(ds, str):
ds = OpenEx(ds, OF_VERBOSE_ERROR | OF_MULTIDIM_RASTER)
ret = MultiDimInfoInternal(ds, opts)
if not as_text:
import json
ret = json.loads(ret)
return ret
def _strHighPrec(x):
return x if isinstance(x, str) else '%.18g' % x
mapGRIORAMethodToString = {
gdalconst.GRIORA_NearestNeighbour: 'near',
gdalconst.GRIORA_Bilinear: 'bilinear',
gdalconst.GRIORA_Cubic: 'cubic',
gdalconst.GRIORA_CubicSpline: 'cubicspline',
gdalconst.GRIORA_Lanczos: 'lanczos',
gdalconst.GRIORA_Average: 'average',
gdalconst.GRIORA_RMS: 'rms',
gdalconst.GRIORA_Mode: 'mode',
gdalconst.GRIORA_Gauss: 'gauss',
}
def TranslateOptions(options=None, format=None,
outputType = gdalconst.GDT_Unknown, bandList=None, maskBand=None,
width = 0, height = 0, widthPct = 0.0, heightPct = 0.0,
xRes = 0.0, yRes = 0.0,
creationOptions=None, srcWin=None, projWin=None, projWinSRS=None, strict = False,
unscale = False, scaleParams=None, exponents=None,
outputBounds=None, metadataOptions=None,
outputSRS=None, nogcp=False, GCPs=None,
noData=None, rgbExpand=None,
stats = False, rat = True, resampleAlg=None,
callback=None, callback_data=None):
""" Create a TranslateOptions() object that can be passed to gdal.Translate()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
outputType --- output type (gdalconst.GDT_Byte, etc...)
bandList --- array of band numbers (index start at 1)
maskBand --- mask band to generate or not ("none", "auto", "mask", 1, ...)
width --- width of the output raster in pixel
height --- height of the output raster in pixel
widthPct --- width of the output raster in percentage (100 = original width)
heightPct --- height of the output raster in percentage (100 = original height)
xRes --- output horizontal resolution
yRes --- output vertical resolution
creationOptions --- list of creation options
srcWin --- subwindow in pixels to extract: [left_x, top_y, width, height]
projWin --- subwindow in projected coordinates to extract: [ulx, uly, lrx, lry]
projWinSRS --- SRS in which projWin is expressed
strict --- strict mode
unscale --- unscale values with scale and offset metadata
scaleParams --- list of scale parameters, each of the form [src_min,src_max] or [src_min,src_max,dst_min,dst_max]
exponents --- list of exponentiation parameters
outputBounds --- assigned output bounds: [ulx, uly, lrx, lry]
metadataOptions --- list of metadata options
outputSRS --- assigned output SRS
nogcp --- ignore GCP in the raster
GCPs --- list of GCPs
noData --- nodata value (or "none" to unset it)
rgbExpand --- Color palette expansion mode: "gray", "rgb", "rgba"
stats --- whether to calculate statistics
rat --- whether to write source RAT
resampleAlg --- resampling mode
callback --- callback method
callback_data --- user data for callback
"""
# Only used for tests
return_option_list = options == '__RETURN_OPTION_LIST__'
if return_option_list:
options = []
else:
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if outputType != gdalconst.GDT_Unknown:
new_options += ['-ot', GetDataTypeName(outputType)]
if maskBand != None:
new_options += ['-mask', str(maskBand)]
if bandList != None:
for b in bandList:
new_options += ['-b', str(b)]
if width != 0 or height != 0:
new_options += ['-outsize', str(width), str(height)]
elif widthPct != 0 and heightPct != 0:
new_options += ['-outsize', str(widthPct) + '%%', str(heightPct) + '%%']
if creationOptions is not None:
if isinstance(creationOptions, str):
new_options += ['-co', creationOptions]
else:
for opt in creationOptions:
new_options += ['-co', opt]
if srcWin is not None:
new_options += ['-srcwin', _strHighPrec(srcWin[0]), _strHighPrec(srcWin[1]), _strHighPrec(srcWin[2]), _strHighPrec(srcWin[3])]
if strict:
new_options += ['-strict']
if unscale:
new_options += ['-unscale']
if scaleParams:
for scaleParam in scaleParams:
new_options += ['-scale']
for v in scaleParam:
new_options += [str(v)]
if exponents:
for exponent in exponents:
new_options += ['-exponent', _strHighPrec(exponent)]
if outputBounds is not None:
new_options += ['-a_ullr', _strHighPrec(outputBounds[0]), _strHighPrec(outputBounds[1]), _strHighPrec(outputBounds[2]), _strHighPrec(outputBounds[3])]
if metadataOptions is not None:
if isinstance(metadataOptions, str):
new_options += ['-mo', metadataOptions]
else:
for opt in metadataOptions:
new_options += ['-mo', opt]
if outputSRS is not None:
new_options += ['-a_srs', str(outputSRS)]
if nogcp:
new_options += ['-nogcp']
if GCPs is not None:
for gcp in GCPs:
new_options += ['-gcp', _strHighPrec(gcp.GCPPixel), _strHighPrec(gcp.GCPLine), _strHighPrec(gcp.GCPX), str(gcp.GCPY), _strHighPrec(gcp.GCPZ)]
if projWin is not None:
new_options += ['-projwin', _strHighPrec(projWin[0]), _strHighPrec(projWin[1]), _strHighPrec(projWin[2]), _strHighPrec(projWin[3])]
if projWinSRS is not None:
new_options += ['-projwin_srs', str(projWinSRS)]
if noData is not None:
new_options += ['-a_nodata', _strHighPrec(noData)]
if rgbExpand is not None:
new_options += ['-expand', str(rgbExpand)]
if stats:
new_options += ['-stats']
if not rat:
new_options += ['-norat']
if resampleAlg is not None:
if resampleAlg in mapGRIORAMethodToString:
new_options += ['-r', mapGRIORAMethodToString[resampleAlg]]
else:
new_options += ['-r', str(resampleAlg)]
if xRes != 0 and yRes != 0:
new_options += ['-tr', _strHighPrec(xRes), _strHighPrec(yRes)]
if return_option_list:
return new_options
return (GDALTranslateOptions(new_options), callback, callback_data)
def Translate(destName, srcDS, **kwargs):
""" Convert a dataset.
Arguments are :
destName --- Output dataset name
srcDS --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.TranslateOptions(), string or array of strings
other keyword arguments of gdal.TranslateOptions()
If options is provided as a gdal.TranslateOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = TranslateOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = Open(srcDS)
return TranslateInternal(destName, srcDS, opts, callback, callback_data)
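# Illustrative sketch, not part of the GDAL API: building a TranslateOptions()
# tuple once and reusing it for several Translate() calls. The 'PNG' format and
# the single-band selection are illustrative assumptions.
def _example_translate_many(src_dst_pairs):
    """Translate (src, dst) filename pairs with shared options (sketch only)."""
    # TranslateOptions() returns (GDALTranslateOptions, callback, callback_data),
    # which Translate() accepts directly through its 'options' keyword.
    opts = TranslateOptions(format='PNG', bandList=[1])
    for src, dst in src_dst_pairs:
        Translate(dst, src, options=opts)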
def WarpOptions(options=None, format=None,
outputBounds=None,
outputBoundsSRS=None,
xRes=None, yRes=None, targetAlignedPixels = False,
width = 0, height = 0,
srcSRS=None, dstSRS=None,
coordinateOperation=None,
srcAlpha = False, dstAlpha = False,
warpOptions=None, errorThreshold=None,
warpMemoryLimit=None, creationOptions=None, outputType = gdalconst.GDT_Unknown,
workingType = gdalconst.GDT_Unknown, resampleAlg=None,
srcNodata=None, dstNodata=None, multithread = False,
tps = False, rpc = False, geoloc = False, polynomialOrder=None,
transformerOptions=None, cutlineDSName=None,
cutlineLayer=None, cutlineWhere=None, cutlineSQL=None, cutlineBlend=None, cropToCutline = False,
copyMetadata = True, metadataConflictValue=None,
setColorInterpretation = False,
overviewLevel = 'AUTO',
callback=None, callback_data=None):
""" Create a WarpOptions() object that can be passed to gdal.Warp()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
outputBounds --- output bounds as (minX, minY, maxX, maxY) in target SRS
outputBoundsSRS --- SRS in which output bounds are expressed, in the case they are not expressed in dstSRS
xRes, yRes --- output resolution in target SRS
targetAlignedPixels --- whether to force output bounds to be multiple of output resolution
width --- width of the output raster in pixel
height --- height of the output raster in pixel
srcSRS --- source SRS
dstSRS --- output SRS
coordinateOperation --- coordinate operation as a PROJ string or WKT string
srcAlpha --- whether to force the last band of the input dataset to be considered as an alpha band
dstAlpha --- whether to force the creation of an output alpha band
outputType --- output type (gdalconst.GDT_Byte, etc...)
workingType --- working type (gdalconst.GDT_Byte, etc...)
warpOptions --- list of warping options
errorThreshold --- error threshold for approximation transformer (in pixels)
warpMemoryLimit --- size of working buffer in MB
resampleAlg --- resampling mode
creationOptions --- list of creation options
srcNodata --- source nodata value(s)
dstNodata --- output nodata value(s)
multithread --- whether to multithread computation and I/O operations
tps --- whether to use Thin Plate Spline GCP transformer
rpc --- whether to use RPC transformer
geoloc --- whether to use GeoLocation array transformer
polynomialOrder --- order of polynomial GCP interpolation
transformerOptions --- list of transformer options
cutlineDSName --- cutline dataset name
cutlineLayer --- cutline layer name
cutlineWhere --- cutline WHERE clause
cutlineSQL --- cutline SQL statement
cutlineBlend --- cutline blend distance in pixels
cropToCutline --- whether to use cutline extent for output bounds
copyMetadata --- whether to copy source metadata
metadataConflictValue --- metadata data conflict value
setColorInterpretation --- whether to force color interpretation of input bands to output bands
overviewLevel --- To specify which overview level of source files must be used
callback --- callback method
callback_data --- user data for callback
"""
# Only used for tests
return_option_list = options == '__RETURN_OPTION_LIST__'
if return_option_list:
options = []
else:
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if outputType != gdalconst.GDT_Unknown:
new_options += ['-ot', GetDataTypeName(outputType)]
if workingType != gdalconst.GDT_Unknown:
new_options += ['-wt', GetDataTypeName(workingType)]
if outputBounds is not None:
new_options += ['-te', _strHighPrec(outputBounds[0]), _strHighPrec(outputBounds[1]), _strHighPrec(outputBounds[2]), _strHighPrec(outputBounds[3])]
if outputBoundsSRS is not None:
new_options += ['-te_srs', str(outputBoundsSRS)]
if xRes is not None and yRes is not None:
new_options += ['-tr', _strHighPrec(xRes), _strHighPrec(yRes)]
if width != 0 or height != 0:
new_options += ['-ts', str(width), str(height)]
if srcSRS is not None:
new_options += ['-s_srs', str(srcSRS)]
if dstSRS is not None:
new_options += ['-t_srs', str(dstSRS)]
if coordinateOperation is not None:
new_options += ['-ct', coordinateOperation]
if targetAlignedPixels:
new_options += ['-tap']
if srcAlpha:
new_options += ['-srcalpha']
if dstAlpha:
new_options += ['-dstalpha']
if warpOptions is not None:
for opt in warpOptions:
new_options += ['-wo', str(opt)]
if errorThreshold is not None:
new_options += ['-et', _strHighPrec(errorThreshold)]
if resampleAlg is not None:
mapMethodToString = {
gdalconst.GRA_NearestNeighbour: 'near',
gdalconst.GRA_Bilinear: 'bilinear',
gdalconst.GRA_Cubic: 'cubic',
gdalconst.GRA_CubicSpline: 'cubicspline',
gdalconst.GRA_Lanczos: 'lanczos',
gdalconst.GRA_Average: 'average',
gdalconst.GRA_RMS: 'rms',
gdalconst.GRA_Mode: 'mode',
gdalconst.GRA_Max: 'max',
gdalconst.GRA_Min: 'min',
gdalconst.GRA_Med: 'med',
gdalconst.GRA_Q1: 'q1',
gdalconst.GRA_Q3: 'q3',
gdalconst.GRA_Sum: 'sum',
}
if resampleAlg in mapMethodToString:
new_options += ['-r', mapMethodToString[resampleAlg]]
else:
new_options += ['-r', str(resampleAlg)]
if warpMemoryLimit is not None:
new_options += ['-wm', str(warpMemoryLimit)]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if srcNodata is not None:
new_options += ['-srcnodata', str(srcNodata)]
if dstNodata is not None:
new_options += ['-dstnodata', str(dstNodata)]
if multithread:
new_options += ['-multi']
if tps:
new_options += ['-tps']
if rpc:
new_options += ['-rpc']
if geoloc:
new_options += ['-geoloc']
if polynomialOrder is not None:
new_options += ['-order', str(polynomialOrder)]
if transformerOptions is not None:
for opt in transformerOptions:
new_options += ['-to', opt]
if cutlineDSName is not None:
new_options += ['-cutline', str(cutlineDSName)]
if cutlineLayer is not None:
new_options += ['-cl', str(cutlineLayer)]
if cutlineWhere is not None:
new_options += ['-cwhere', str(cutlineWhere)]
if cutlineSQL is not None:
new_options += ['-csql', str(cutlineSQL)]
if cutlineBlend is not None:
new_options += ['-cblend', str(cutlineBlend)]
if cropToCutline:
new_options += ['-crop_to_cutline']
if not copyMetadata:
new_options += ['-nomd']
if metadataConflictValue:
new_options += ['-cvmd', str(metadataConflictValue)]
if setColorInterpretation:
new_options += ['-setci']
if overviewLevel is None or isinstance(overviewLevel, str):
pass
elif isinstance(overviewLevel, int):
if overviewLevel < 0:
overviewLevel = 'AUTO' + str(overviewLevel)
else:
overviewLevel = str(overviewLevel)
else:
overviewLevel = None
if overviewLevel is not None and overviewLevel != 'AUTO':
new_options += ['-ovr', overviewLevel]
if return_option_list:
return new_options
return (GDALWarpAppOptions(new_options), callback, callback_data)
def Warp(destNameOrDestDS, srcDSOrSrcDSTab, **kwargs):
""" Warp one or several datasets.
Arguments are :
destNameOrDestDS --- Output dataset name or object
srcDSOrSrcDSTab --- an array of Dataset objects or filenames, or a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.WarpOptions(), string or array of strings
other keyword arguments of gdal.WarpOptions()
If options is provided as a gdal.WarpOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = WarpOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDSOrSrcDSTab, str):
srcDSTab = [Open(srcDSOrSrcDSTab)]
elif isinstance(srcDSOrSrcDSTab, list):
srcDSTab = []
for elt in srcDSOrSrcDSTab:
if isinstance(elt, str):
srcDSTab.append(Open(elt))
else:
srcDSTab.append(elt)
else:
srcDSTab = [srcDSOrSrcDSTab]
if isinstance(destNameOrDestDS, str):
return wrapper_GDALWarpDestName(destNameOrDestDS, srcDSTab, opts, callback, callback_data)
else:
return wrapper_GDALWarpDestDS(destNameOrDestDS, srcDSTab, opts, callback, callback_data)
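# Illustrative sketch, not part of the GDAL API: reprojecting one or several
# inputs with Warp(). The EPSG:4326 target SRS and bilinear resampling are
# illustrative assumptions.
def _example_warp_to_wgs84(src_filenames, dst_filename):
    """Warp the given inputs to EPSG:4326 in a GTiff output (sketch only)."""
    return Warp(dst_filename, src_filenames, format='GTiff',
                dstSRS='EPSG:4326', resampleAlg=gdalconst.GRA_Bilinear)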
def VectorTranslateOptions(options=None, format=None,
accessMode=None,
srcSRS=None, dstSRS=None, reproject=True,
coordinateOperation=None,
SQLStatement=None, SQLDialect=None, where=None, selectFields=None,
addFields=False,
forceNullable=False,
emptyStrAsNull=False,
spatFilter=None, spatSRS=None,
datasetCreationOptions=None,
layerCreationOptions=None,
layers=None,
layerName=None,
geometryType=None,
dim=None,
segmentizeMaxDist= None,
makeValid=False,
zField=None,
resolveDomains=False,
skipFailures=False,
limit=None,
callback=None, callback_data=None):
""" Create a VectorTranslateOptions() object that can be passed to gdal.VectorTranslate()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("ESRI Shapefile", etc...)
accessMode --- None for creation, 'update', 'append', 'overwrite'
srcSRS --- source SRS
dstSRS --- output SRS (with reprojection if reproject = True)
coordinateOperation --- coordinate operation as a PROJ string or WKT string
reproject --- whether to do reprojection
SQLStatement --- SQL statement to apply to the source dataset
SQLDialect --- SQL dialect ('OGRSQL', 'SQLITE', ...)
where --- WHERE clause to apply to source layer(s)
selectFields --- list of fields to select
addFields --- whether to add new fields found in source layers (to be used with accessMode == 'append')
forceNullable --- whether to drop NOT NULL constraints on newly created fields
emptyStrAsNull --- whether to treat empty string values as NULL
spatFilter --- spatial filter as (minX, minY, maxX, maxY) bounding box
spatSRS --- SRS in which the spatFilter is expressed. If not specified, it is assumed to be the one of the layer(s)
datasetCreationOptions --- list of dataset creation options
layerCreationOptions --- list of layer creation options
layers --- list of layers to convert
layerName --- output layer name
geometryType --- output layer geometry type ('POINT', ....)
dim --- output dimension ('XY', 'XYZ', 'XYM', 'XYZM', 'layer_dim')
segmentizeMaxDist --- maximum distance between consecutive nodes of a line geometry
makeValid --- run MakeValid() on geometries
zField --- name of field to use to set the Z component of geometries
resolveDomains --- whether to create an additional field for each field associated with a coded field domain.
skipFailures --- whether to skip failures
limit --- maximum number of features to read per layer
callback --- callback method
callback_data --- user data for callback
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-f', format]
if srcSRS is not None:
new_options += ['-s_srs', str(srcSRS)]
if dstSRS is not None:
if reproject:
new_options += ['-t_srs', str(dstSRS)]
else:
new_options += ['-a_srs', str(dstSRS)]
if coordinateOperation is not None:
new_options += ['-ct', coordinateOperation]
if SQLStatement is not None:
new_options += ['-sql', str(SQLStatement)]
if SQLDialect is not None:
new_options += ['-dialect', str(SQLDialect)]
if where is not None:
new_options += ['-where', str(where)]
if accessMode is not None:
if accessMode == 'update':
new_options += ['-update']
elif accessMode == 'append':
new_options += ['-append']
elif accessMode == 'overwrite':
new_options += ['-overwrite']
else:
raise Exception('unhandled accessMode')
if addFields:
new_options += ['-addfields']
if forceNullable:
new_options += ['-forceNullable']
if emptyStrAsNull:
new_options += ['-emptyStrAsNull']
if selectFields is not None:
val = ''
for item in selectFields:
if val:
val += ','
val += item
new_options += ['-select', val]
if datasetCreationOptions is not None:
for opt in datasetCreationOptions:
new_options += ['-dsco', opt]
if layerCreationOptions is not None:
for opt in layerCreationOptions:
new_options += ['-lco', opt]
if layers is not None:
if isinstance(layers, str):
new_options += [layers]
else:
for lyr in layers:
new_options += [lyr]
if segmentizeMaxDist is not None:
new_options += ['-segmentize', str(segmentizeMaxDist)]
if makeValid:
new_options += ['-makevalid']
if spatFilter is not None:
new_options += ['-spat', str(spatFilter[0]), str(spatFilter[1]), str(spatFilter[2]), str(spatFilter[3])]
if spatSRS is not None:
new_options += ['-spat_srs', str(spatSRS)]
if layerName is not None:
new_options += ['-nln', layerName]
if geometryType is not None:
if isinstance(geometryType, str):
new_options += ['-nlt', geometryType]
else:
for opt in geometryType:
new_options += ['-nlt', opt]
if dim is not None:
new_options += ['-dim', dim]
if zField is not None:
new_options += ['-zfield', zField]
if resolveDomains:
new_options += ['-resolveDomains']
if skipFailures:
new_options += ['-skip']
if limit is not None:
new_options += ['-limit', str(limit)]
if callback is not None:
new_options += ['-progress']
return (GDALVectorTranslateOptions(new_options), callback, callback_data)
def VectorTranslate(destNameOrDestDS, srcDS, **kwargs):
""" Convert one vector dataset
Arguments are :
destNameOrDestDS --- Output dataset name or object
srcDS --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.VectorTranslateOptions(), string or array of strings
other keyword arguments of gdal.VectorTranslateOptions()
If options is provided as a gdal.VectorTranslateOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = VectorTranslateOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = OpenEx(srcDS, gdalconst.OF_VECTOR)
if isinstance(destNameOrDestDS, str):
return wrapper_GDALVectorTranslateDestName(destNameOrDestDS, srcDS, opts, callback, callback_data)
else:
return wrapper_GDALVectorTranslateDestDS(destNameOrDestDS, srcDS, opts, callback, callback_data)
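# Illustrative sketch, not part of the GDAL API: converting a vector dataset to
# GeoPackage while reprojecting. The 'GPKG' format and EPSG:4326 target are
# illustrative assumptions.
def _example_vector_to_gpkg(src_path, dst_path):
    """Convert a vector dataset to a GeoPackage in EPSG:4326 (sketch only)."""
    return VectorTranslate(dst_path, src_path, format='GPKG',
                           dstSRS='EPSG:4326', reproject=True)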
def DEMProcessingOptions(options=None, colorFilename=None, format=None,
creationOptions=None, computeEdges=False, alg=None, band=1,
zFactor=None, scale=None, azimuth=None, altitude=None,
combined=False, multiDirectional=False, igor=False,
slopeFormat=None, trigonometric=False, zeroForFlat=False,
addAlpha=None, colorSelection=None,
callback=None, callback_data=None):
""" Create a DEMProcessingOptions() object that can be passed to gdal.DEMProcessing()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
colorFilename --- (mandatory for "color-relief") name of file that contains palette definition for the "color-relief" processing.
format --- output format ("GTiff", etc...)
creationOptions --- list of creation options
computeEdges --- whether to compute values at raster edges.
alg --- 'Horn' (default) or 'ZevenbergenThorne' for hillshade, slope or aspect. 'Wilson' (default) or 'Riley' for TRI
band --- source band number to use
zFactor --- (hillshade only) vertical exaggeration used to pre-multiply the elevations.
scale --- ratio of vertical units to horizontal.
azimuth --- (hillshade only) azimuth of the light, in degrees. 0 if it comes from the top of the raster, 90 from the east, ... The default value, 315, should rarely be changed as it is the value generally used to generate shaded maps.
altitude --- (hillshade only) altitude of the light, in degrees. 90 if the light comes from above the DEM, 0 if it is raking light.
combined --- (hillshade only) whether to compute combined shading, a combination of slope and oblique shading. Only one of combined, multiDirectional and igor can be specified.
multiDirectional --- (hillshade only) whether to compute multi-directional shading. Only one of combined, multiDirectional and igor can be specified.
igor --- (hillshade only) whether to use Igor's hillshading from Maperitive. Only one of combined, multiDirectional and igor can be specified.
slopeFormat --- (slope only) "degree" or "percent".
trigonometric --- (aspect only) whether to return trigonometric angle instead of azimuth. Thus 0deg means East, 90deg North, 180deg West, 270deg South.
zeroForFlat --- (aspect only) whether to return 0 for flat areas with slope=0, instead of -9999.
addAlpha --- adds an alpha band to the output file (only for processing = 'color-relief')
colorSelection --- (color-relief only) Determines how color entries are selected from an input value. Can be "nearest_color_entry", "exact_color_entry" or "linear_interpolation". Defaults to "linear_interpolation"
callback --- callback method
callback_data --- user data for callback
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if computeEdges:
new_options += ['-compute_edges']
if alg:
new_options += ['-alg', alg]
new_options += ['-b', str(band)]
if zFactor is not None:
new_options += ['-z', str(zFactor)]
if scale is not None:
new_options += ['-s', str(scale)]
if azimuth is not None:
new_options += ['-az', str(azimuth)]
if altitude is not None:
new_options += ['-alt', str(altitude)]
if combined:
new_options += ['-combined']
if multiDirectional:
new_options += ['-multidirectional']
if igor:
new_options += ['-igor']
if slopeFormat == 'percent':
new_options += ['-p']
if trigonometric:
new_options += ['-trigonometric']
if zeroForFlat:
new_options += ['-zero_for_flat']
if colorSelection is not None:
if colorSelection == 'nearest_color_entry':
new_options += ['-nearest_color_entry']
elif colorSelection == 'exact_color_entry':
new_options += ['-exact_color_entry']
elif colorSelection == 'linear_interpolation':
pass
else:
raise ValueError("Unsupported value for colorSelection")
if addAlpha:
new_options += ['-alpha']
return (GDALDEMProcessingOptions(new_options), colorFilename, callback, callback_data)
def DEMProcessing(destName, srcDS, processing, **kwargs):
""" Apply a DEM processing.
Arguments are :
destName --- Output dataset name
srcDS --- a Dataset object or a filename
processing --- one of "hillshade", "slope", "aspect", "color-relief", "TRI", "TPI", "Roughness"
Keyword arguments are :
options --- return of gdal.DEMProcessingOptions(), string or array of strings
other keyword arguments of gdal.DEMProcessingOptions()
If options is provided as a gdal.DEMProcessingOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, colorFilename, callback, callback_data) = DEMProcessingOptions(**kwargs)
else:
(opts, colorFilename, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = Open(srcDS)
return DEMProcessingInternal(destName, srcDS, processing, colorFilename, opts, callback, callback_data)
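# Illustrative sketch, not part of the GDAL API: computing a hillshade from a DEM
# with DEMProcessing(). The azimuth and altitude values are illustrative choices.
def _example_hillshade(dem_path, out_path):
    """Produce a hillshade raster from a DEM (sketch only)."""
    return DEMProcessing(out_path, dem_path, 'hillshade',
                         computeEdges=True, azimuth=315, altitude=45)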
def NearblackOptions(options=None, format=None,
creationOptions=None, white = False, colors=None,
maxNonBlack=None, nearDist=None, setAlpha = False, setMask = False,
callback=None, callback_data=None):
""" Create a NearblackOptions() object that can be passed to gdal.Nearblack()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
creationOptions --- list of creation options
white --- whether to search for nearly white (255) pixels instead of nearly black pixels.
colors --- list of colors to search for, e.g. ((0,0,0),(255,255,255)). The pixels that are considered as the collar are set to 0
maxNonBlack --- number of non-black (or other searched colors specified with white / colors) pixels that can be encountered before giving up the search inwards. Defaults to 2.
nearDist --- select how far from black, white or custom colors the pixel values can be and still be considered near black, white or the custom color. Defaults to 15.
setAlpha --- adds an alpha band to the output file.
setMask --- adds a mask band to the output file.
callback --- callback method
callback_data --- user data for callback
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if white:
new_options += ['-white']
if colors is not None:
for color in colors:
color_str = ''
for cpt in color:
if color_str != '':
color_str += ','
color_str += str(cpt)
new_options += ['-color', color_str]
if maxNonBlack is not None:
new_options += ['-nb', str(maxNonBlack)]
if nearDist is not None:
new_options += ['-near', str(nearDist)]
if setAlpha:
new_options += ['-setalpha']
if setMask:
new_options += ['-setmask']
return (GDALNearblackOptions(new_options), callback, callback_data)
def Nearblack(destNameOrDestDS, srcDS, **kwargs):
""" Convert nearly black/white borders to exact value.
Arguments are :
destNameOrDestDS --- Output dataset name or object
srcDS --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.NearblackOptions(), string or array of strings
other keyword arguments of gdal.NearblackOptions()
If options is provided as a gdal.NearblackOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = NearblackOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = OpenEx(srcDS)
if isinstance(destNameOrDestDS, str):
return wrapper_GDALNearblackDestName(destNameOrDestDS, srcDS, opts, callback, callback_data)
else:
return wrapper_GDALNearblackDestDS(destNameOrDestDS, srcDS, opts, callback, callback_data)
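# Illustrative sketch, not part of the GDAL API: trimming a nearly-white collar
# and exposing it through an alpha band. The nearDist value is an assumption.
def _example_trim_white_collar(src_path, dst_path):
    """Set nearly-white border pixels to white and add an alpha band (sketch only)."""
    return Nearblack(dst_path, src_path, white=True, nearDist=10, setAlpha=True)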
def GridOptions(options=None, format=None,
outputType=gdalconst.GDT_Unknown,
width=0, height=0,
creationOptions=None,
outputBounds=None,
outputSRS=None,
noData=None,
algorithm=None,
layers=None,
SQLStatement=None,
where=None,
spatFilter=None,
zfield=None,
z_increase=None,
z_multiply=None,
callback=None, callback_data=None):
""" Create a GridOptions() object that can be passed to gdal.Grid()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
outputType --- output type (gdalconst.GDT_Byte, etc...)
width --- width of the output raster in pixel
height --- height of the output raster in pixel
creationOptions --- list of creation options
outputBounds --- assigned output bounds: [ulx, uly, lrx, lry]
outputSRS --- assigned output SRS
noData --- nodata value
algorithm --- e.g "invdist:power=2.0:smoothing=0.0:radius1=0.0:radius2=0.0:angle=0.0:max_points=0:min_points=0:nodata=0.0"
layers --- list of layers to convert
SQLStatement --- SQL statement to apply to the source dataset
where --- WHERE clause to apply to source layer(s)
spatFilter --- spatial filter as (minX, minY, maxX, maxY) bounding box
zfield --- Identifies an attribute field on the features to be used to get a Z value from. This value overrides the Z value read from the feature geometry record.
z_increase --- Addition to the attribute field on the features to be used to get a Z value from. The addition should be in the same unit as the Z value. The result value will be Z value + Z increase value. The default value is 0.
z_multiply --- Multiplication ratio for the Z field. This can be used for unit conversion, e.g. from feet to meters, or to convert elevation to depth. The result value will be (Z value + Z increase value) * Z multiply value. The default value is 1.
callback --- callback method
callback_data --- user data for callback
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if outputType != gdalconst.GDT_Unknown:
new_options += ['-ot', GetDataTypeName(outputType)]
if width != 0 or height != 0:
new_options += ['-outsize', str(width), str(height)]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if outputBounds is not None:
new_options += ['-txe', _strHighPrec(outputBounds[0]), _strHighPrec(outputBounds[2]), '-tye', _strHighPrec(outputBounds[1]), _strHighPrec(outputBounds[3])]
if outputSRS is not None:
new_options += ['-a_srs', str(outputSRS)]
if algorithm is not None:
new_options += ['-a', algorithm]
if layers is not None:
if isinstance(layers, (tuple, list)):
for layer in layers:
new_options += ['-l', layer]
else:
new_options += ['-l', layers]
if SQLStatement is not None:
new_options += ['-sql', str(SQLStatement)]
if where is not None:
new_options += ['-where', str(where)]
if zfield is not None:
new_options += ['-zfield', zfield]
if z_increase is not None:
new_options += ['-z_increase', str(z_increase)]
if z_multiply is not None:
new_options += ['-z_multiply', str(z_multiply)]
if spatFilter is not None:
new_options += ['-spat', str(spatFilter[0]), str(spatFilter[1]), str(spatFilter[2]), str(spatFilter[3])]
return (GDALGridOptions(new_options), callback, callback_data)
def Grid(destName, srcDS, **kwargs):
""" Create raster from the scattered data.
Arguments are :
destName --- Output dataset name
srcDS --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.GridOptions(), string or array of strings
other keyword arguments of gdal.GridOptions()
If options is provided as a gdal.GridOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = GridOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = OpenEx(srcDS, gdalconst.OF_VECTOR)
return GridInternal(destName, srcDS, opts, callback, callback_data)
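# Illustrative sketch, not part of the GDAL API: interpolating scattered points to
# a raster with Grid(). The algorithm string and output size are illustrative.
def _example_grid_invdist(src_vector, dst_raster):
    """Grid a point layer with inverse-distance weighting (sketch only)."""
    return Grid(dst_raster, src_vector, width=256, height=256,
                algorithm='invdist:power=2.0:smoothing=1.0')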
def RasterizeOptions(options=None, format=None,
outputType=gdalconst.GDT_Unknown,
creationOptions=None, noData=None, initValues=None,
outputBounds=None, outputSRS=None,
transformerOptions=None,
width=None, height=None,
xRes=None, yRes=None, targetAlignedPixels=False,
bands=None, inverse=False, allTouched=False,
burnValues=None, attribute=None, useZ=False, layers=None,
SQLStatement=None, SQLDialect=None, where=None, optim=None,
add=None,
callback=None, callback_data=None):
""" Create a RasterizeOptions() object that can be passed to gdal.Rasterize()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
outputType --- output type (gdalconst.GDT_Byte, etc...)
creationOptions --- list of creation options
outputBounds --- assigned output bounds: [minx, miny, maxx, maxy]
outputSRS --- assigned output SRS
transformerOptions --- list of transformer options
width --- width of the output raster in pixel
height --- height of the output raster in pixel
xRes, yRes --- output resolution in target SRS
targetAlignedPixels --- whether to force output bounds to be multiple of output resolution
noData --- nodata value
initValues --- Value or list of values to pre-initialize the output image bands with. However, it is not marked as the nodata value in the output file. If only one value is given, the same value is used in all the bands.
bands --- list of output bands to burn values into
inverse --- whether to invert rasterization, i.e. burn the fixed burn value, or the burn value associated with the first feature, into all parts of the image not inside the provided polygon.
allTouched --- whether to enable the ALL_TOUCHED rasterization option so that all pixels touched by lines or polygons will be updated, not just those on the line render path, or whose center point is within the polygon.
burnValues --- list of fixed values to burn into each band for all objects. Exclusive with attribute.
attribute --- identifies an attribute field on the features to be used for a burn-in value. The value will be burned into all output bands. Exclusive with burnValues.
useZ --- whether to indicate that a burn value should be extracted from the "Z" values of the feature. These values are added to the burn value given by burnValues or attribute if provided. As of now, only points and lines are drawn in 3D.
layers --- list of layers from the datasource that will be used for input features.
SQLStatement --- SQL statement to apply to the source dataset
SQLDialect --- SQL dialect ('OGRSQL', 'SQLITE', ...)
where --- WHERE clause to apply to source layer(s)
optim --- optimization mode ('RASTER', 'VECTOR')
add --- set to True to use additive mode instead of replace when burning values
callback --- callback method
callback_data --- user data for callback
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if outputType != gdalconst.GDT_Unknown:
new_options += ['-ot', GetDataTypeName(outputType)]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if bands is not None:
for b in bands:
new_options += ['-b', str(b)]
if noData is not None:
new_options += ['-a_nodata', str(noData)]
if initValues is not None:
if isinstance(initValues, (tuple, list)):
for val in initValues:
new_options += ['-init', str(val)]
else:
new_options += ['-init', str(initValues)]
if outputBounds is not None:
new_options += ['-te', _strHighPrec(outputBounds[0]), _strHighPrec(outputBounds[1]), _strHighPrec(outputBounds[2]), _strHighPrec(outputBounds[3])]
if outputSRS is not None:
new_options += ['-a_srs', str(outputSRS)]
if transformerOptions is not None:
for opt in transformerOptions:
new_options += ['-to', opt]
if width is not None and height is not None:
new_options += ['-ts', str(width), str(height)]
if xRes is not None and yRes is not None:
new_options += ['-tr', _strHighPrec(xRes), _strHighPrec(yRes)]
if targetAlignedPixels:
new_options += ['-tap']
if inverse:
new_options += ['-i']
if allTouched:
new_options += ['-at']
if burnValues is not None:
if attribute is not None:
raise Exception('burnValues and attribute option are exclusive.')
if isinstance(burnValues, (tuple, list)):
for val in burnValues:
new_options += ['-burn', str(val)]
else:
new_options += ['-burn', str(burnValues)]
if attribute is not None:
new_options += ['-a', attribute]
if useZ:
new_options += ['-3d']
if layers is not None:
if isinstance(layers, ((tuple, list))):
for layer in layers:
new_options += ['-l', layer]
else:
new_options += ['-l', layers]
if SQLStatement is not None:
new_options += ['-sql', str(SQLStatement)]
if SQLDialect is not None:
new_options += ['-dialect', str(SQLDialect)]
if where is not None:
new_options += ['-where', str(where)]
if optim is not None:
new_options += ['-optim', str(optim)]
if add:
new_options += ['-add']
return (GDALRasterizeOptions(new_options), callback, callback_data)
def Rasterize(destNameOrDestDS, srcDS, **kwargs):
""" Burns vector geometries into a raster
Arguments are :
destNameOrDestDS --- Output dataset name or object
srcDS --- a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.RasterizeOptions(), string or array of strings
other keyword arguments of gdal.RasterizeOptions()
If options is provided as a gdal.RasterizeOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = RasterizeOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDS, str):
srcDS = OpenEx(srcDS, gdalconst.OF_VECTOR)
if isinstance(destNameOrDestDS, str):
return wrapper_GDALRasterizeDestName(destNameOrDestDS, srcDS, opts, callback, callback_data)
else:
return wrapper_GDALRasterizeDestDS(destNameOrDestDS, srcDS, opts, callback, callback_data)
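# Illustrative sketch, not part of the GDAL API: burning vector features into a
# new raster. The attribute name 'value' and the 10-unit resolution are
# illustrative assumptions.
def _example_rasterize_attribute(src_vector, dst_raster, field='value'):
    """Rasterize features using an attribute field as the burn value (sketch only)."""
    return Rasterize(dst_raster, src_vector, xRes=10.0, yRes=10.0,
                     attribute=field, outputType=gdalconst.GDT_Float32)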
def BuildVRTOptions(options=None,
resolution=None,
outputBounds=None,
xRes=None, yRes=None,
targetAlignedPixels=None,
separate=None,
bandList=None,
addAlpha=None,
resampleAlg=None,
outputSRS=None,
allowProjectionDifference=None,
srcNodata=None,
VRTNodata=None,
hideNodata=None,
callback=None, callback_data=None):
""" Create a BuildVRTOptions() object that can be passed to gdal.BuildVRT()
Keyword arguments are :
options --- can be an array of strings, a string or left empty and filled from other keywords.
resolution --- 'highest', 'lowest', 'average', 'user'.
outputBounds --- output bounds as (minX, minY, maxX, maxY) in target SRS.
xRes, yRes --- output resolution in target SRS.
targetAlignedPixels --- whether to force output bounds to be multiple of output resolution.
separate --- whether each source file goes into a separate stacked band in the VRT band.
bandList --- array of band numbers (index start at 1).
addAlpha --- whether to add an alpha mask band to the VRT when the source rasters have none.
resampleAlg --- resampling mode.
outputSRS --- assigned output SRS.
allowProjectionDifference --- whether to accept input datasets that do not have the same projection. Note: they will *not* be reprojected.
srcNodata --- source nodata value(s).
VRTNodata --- nodata values at the VRT band level.
hideNodata --- whether to make the VRT band not report the NoData value.
callback --- callback method.
callback_data --- user data for callback.
"""
# Only used for tests
return_option_list = options == '__RETURN_OPTION_LIST__'
if return_option_list:
options = []
else:
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if resolution is not None:
new_options += ['-resolution', str(resolution)]
if outputBounds is not None:
new_options += ['-te', _strHighPrec(outputBounds[0]), _strHighPrec(outputBounds[1]), _strHighPrec(outputBounds[2]), _strHighPrec(outputBounds[3])]
if xRes is not None and yRes is not None:
new_options += ['-tr', _strHighPrec(xRes), _strHighPrec(yRes)]
if targetAlignedPixels:
new_options += ['-tap']
if separate:
new_options += ['-separate']
if bandList != None:
for b in bandList:
new_options += ['-b', str(b)]
if addAlpha:
new_options += ['-addalpha']
if resampleAlg is not None:
if resampleAlg in mapGRIORAMethodToString:
new_options += ['-r', mapGRIORAMethodToString[resampleAlg]]
else:
new_options += ['-r', str(resampleAlg)]
if outputSRS is not None:
new_options += ['-a_srs', str(outputSRS)]
if allowProjectionDifference:
new_options += ['-allow_projection_difference']
if srcNodata is not None:
new_options += ['-srcnodata', str(srcNodata)]
if VRTNodata is not None:
new_options += ['-vrtnodata', str(VRTNodata)]
if hideNodata:
new_options += ['-hidenodata']
if return_option_list:
return new_options
return (GDALBuildVRTOptions(new_options), callback, callback_data)
def BuildVRT(destName, srcDSOrSrcDSTab, **kwargs):
""" Build a VRT from a list of datasets.
Arguments are :
destName --- Output dataset name
srcDSOrSrcDSTab --- an array of Dataset objects or filenames, or a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.BuildVRTOptions(), string or array of strings
other keyword arguments of gdal.BuildVRTOptions()
If options is provided as a gdal.BuildVRTOptions() object, other keywords are ignored. """
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = BuildVRTOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
srcDSTab = []
srcDSNamesTab = []
if isinstance(srcDSOrSrcDSTab, str):
srcDSNamesTab = [srcDSOrSrcDSTab]
elif isinstance(srcDSOrSrcDSTab, list):
for elt in srcDSOrSrcDSTab:
if isinstance(elt, str):
srcDSNamesTab.append(elt)
else:
srcDSTab.append(elt)
if srcDSTab and srcDSNamesTab:
raise Exception('Mix of names and dataset objects not supported')
else:
srcDSTab = [srcDSOrSrcDSTab]
if srcDSTab:
return BuildVRTInternalObjects(destName, srcDSTab, opts, callback, callback_data)
else:
return BuildVRTInternalNames(destName, srcDSNamesTab, opts, callback, callback_data)
def MultiDimTranslateOptions(options=None, format=None, creationOptions=None,
arraySpecs=None, groupSpecs=None, subsetSpecs=None, scaleAxesSpecs=None,
callback=None, callback_data=None):
""" Create a MultiDimTranslateOptions() object that can be passed to gdal.MultiDimTranslate()
Keyword arguments are :
          options --- can be an array of strings, a string, or left empty and filled from other keywords.
format --- output format ("GTiff", etc...)
creationOptions --- list of creation options
          arraySpecs --- list of array specifications, each of them being an array name or "name={src_array_name},dstname={dst_name},transpose=[1,0],view=[:,::-1]"
          groupSpecs --- list of group specifications, each of them being a group name or "name={src_group_name},dstname={dst_name},recursive=no"
          subsetSpecs --- list of subset specifications, each of them being like "{dim_name}({min_val},{max_val})" or "{dim_name}({slice_val})"
          scaleAxesSpecs --- list of dimension scaling specifications, each of them being like "{dim_name}({scale_factor})"
callback --- callback method
callback_data --- user data for callback
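          Example (a minimal illustrative sketch; assumes a GDAL build with a
          multidimensional-capable output driver such as netCDF; names are hypothetical):
            opts = gdal.MultiDimTranslateOptions(format='netCDF', arraySpecs=['temperature'])
            gdal.MultiDimTranslate('out.nc', 'in.nc', options=opts)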
"""
options = [] if options is None else options
if isinstance(options, str):
new_options = ParseCommandLine(options)
else:
new_options = options
if format is not None:
new_options += ['-of', format]
if creationOptions is not None:
for opt in creationOptions:
new_options += ['-co', opt]
if arraySpecs is not None:
for s in arraySpecs:
new_options += ['-array', s]
if groupSpecs is not None:
for s in groupSpecs:
new_options += ['-group', s]
if subsetSpecs is not None:
for s in subsetSpecs:
new_options += ['-subset', s]
if scaleAxesSpecs is not None:
for s in scaleAxesSpecs:
new_options += ['-scaleaxes', s]
return (GDALMultiDimTranslateOptions(new_options), callback, callback_data)
def MultiDimTranslate(destName, srcDSOrSrcDSTab, **kwargs):
""" MultiDimTranslate one or several datasets.
Arguments are :
destName --- Output dataset name
srcDSOrSrcDSTab --- an array of Dataset objects or filenames, or a Dataset object or a filename
Keyword arguments are :
options --- return of gdal.MultiDimTranslateOptions(), string or array of strings
          other keyword arguments of gdal.MultiDimTranslateOptions()
If options is provided as a gdal.MultiDimTranslateOptions() object, other keywords are ignored. """
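    # Usage sketch (illustrative only; keywords may also be passed directly, and the
    # subset expression and file names below are hypothetical):
    #   gdal.MultiDimTranslate('subset.nc', 'in.nc', subsetSpecs=['time(0,10)'])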
if 'options' not in kwargs or isinstance(kwargs['options'], (list, str)):
(opts, callback, callback_data) = MultiDimTranslateOptions(**kwargs)
else:
(opts, callback, callback_data) = kwargs['options']
if isinstance(srcDSOrSrcDSTab, str):
srcDSTab = [OpenEx(srcDSOrSrcDSTab, OF_VERBOSE_ERROR | OF_RASTER | OF_MULTIDIM_RASTER)]
elif isinstance(srcDSOrSrcDSTab, list):
srcDSTab = []
for elt in srcDSOrSrcDSTab:
if isinstance(elt, str):
srcDSTab.append(OpenEx(elt, OF_VERBOSE_ERROR | OF_RASTER | OF_MULTIDIM_RASTER))
else:
srcDSTab.append(elt)
else:
srcDSTab = [srcDSOrSrcDSTab]
return wrapper_GDALMultiDimTranslateDestName(destName, srcDSTab, opts, callback, callback_data)
# Logging Helpers
def _pylog_handler(err_level, err_no, err_msg):
if err_no != gdalconst.CPLE_None:
typ = _pylog_handler.errcode_map.get(err_no, str(err_no))
message = "%s: %s" % (typ, err_msg)
else:
message = err_msg
level = _pylog_handler.level_map.get(err_level, 20) # default level is INFO
_pylog_handler.logger.log(level, message)
def ConfigurePythonLogging(logger_name='gdal', enable_debug=False):
""" Configure GDAL to use Python's logging framework """
import logging
_pylog_handler.logger = logging.getLogger(logger_name)
# map CPLE_* constants to names
_pylog_handler.errcode_map = {_num: _name[5:] for _name, _num in gdalconst.__dict__.items() if _name.startswith('CPLE_')}
# Map GDAL log levels to Python's
_pylog_handler.level_map = {
CE_None: logging.INFO,
CE_Debug: logging.DEBUG,
CE_Warning: logging.WARN,
CE_Failure: logging.ERROR,
CE_Fatal: logging.CRITICAL,
}
# Set CPL_DEBUG so debug messages are passed through the logger
if enable_debug:
SetConfigOption("CPL_DEBUG", "ON")
# Install as the default GDAL log handler
SetErrorHandler(_pylog_handler)
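# Usage sketch for ConfigurePythonLogging (illustrative; the logger name is whatever
# the application chooses):
#   gdal.ConfigurePythonLogging(logger_name='my_app.gdal', enable_debug=True)
#   # GDAL errors and warnings are then routed through logging.getLogger('my_app.gdal')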
def Debug(*args):
"""Debug(char const * msg_class, char const * message)"""
return _gdal.Debug(*args)
def SetErrorHandler(*args):
"""SetErrorHandler(CPLErrorHandler pfnErrorHandler=0) -> CPLErr"""
return _gdal.SetErrorHandler(*args)
def SetCurrentErrorHandlerCatchDebug(*args):
"""SetCurrentErrorHandlerCatchDebug(int bCatchDebug)"""
return _gdal.SetCurrentErrorHandlerCatchDebug(*args)
def PushErrorHandler(*args):
"""PushErrorHandler(CPLErrorHandler pfnErrorHandler=0) -> CPLErr"""
return _gdal.PushErrorHandler(*args)
def PopErrorHandler(*args):
"""PopErrorHandler()"""
return _gdal.PopErrorHandler(*args)
def Error(*args):
"""Error(CPLErr msg_class, int err_code=0, char const * msg)"""
return _gdal.Error(*args)
def GOA2GetAuthorizationURL(*args):
"""GOA2GetAuthorizationURL(char const * pszScope) -> retStringAndCPLFree *"""
return _gdal.GOA2GetAuthorizationURL(*args)
def GOA2GetRefreshToken(*args):
"""GOA2GetRefreshToken(char const * pszAuthToken, char const * pszScope) -> retStringAndCPLFree *"""
return _gdal.GOA2GetRefreshToken(*args)
def GOA2GetAccessToken(*args):
"""GOA2GetAccessToken(char const * pszRefreshToken, char const * pszScope) -> retStringAndCPLFree *"""
return _gdal.GOA2GetAccessToken(*args)
def ErrorReset(*args):
"""ErrorReset()"""
return _gdal.ErrorReset(*args)
def EscapeString(*args, **kwargs):
"""EscapeString(int len, int scheme) -> retStringAndCPLFree *"""
return _gdal.EscapeString(*args, **kwargs)
def GetLastErrorNo(*args):
"""GetLastErrorNo() -> int"""
return _gdal.GetLastErrorNo(*args)
def GetLastErrorType(*args):
"""GetLastErrorType() -> int"""
return _gdal.GetLastErrorType(*args)
def GetLastErrorMsg(*args):
"""GetLastErrorMsg() -> char const *"""
return _gdal.GetLastErrorMsg(*args)
def GetErrorCounter(*args):
"""GetErrorCounter() -> unsigned int"""
return _gdal.GetErrorCounter(*args)
def VSIGetLastErrorNo(*args):
"""VSIGetLastErrorNo() -> int"""
return _gdal.VSIGetLastErrorNo(*args)
def VSIGetLastErrorMsg(*args):
"""VSIGetLastErrorMsg() -> char const *"""
return _gdal.VSIGetLastErrorMsg(*args)
def VSIErrorReset(*args):
"""VSIErrorReset()"""
return _gdal.VSIErrorReset(*args)
def PushFinderLocation(*args):
"""PushFinderLocation(char const * utf8_path)"""
return _gdal.PushFinderLocation(*args)
def PopFinderLocation(*args):
"""PopFinderLocation()"""
return _gdal.PopFinderLocation(*args)
def FinderClean(*args):
"""FinderClean()"""
return _gdal.FinderClean(*args)
def FindFile(*args):
"""FindFile(char const * pszClass, char const * utf8_path) -> char const *"""
return _gdal.FindFile(*args)
def ReadDir(*args):
"""ReadDir(char const * utf8_path, int nMaxFiles=0) -> char **"""
return _gdal.ReadDir(*args)
def ReadDirRecursive(*args):
"""ReadDirRecursive(char const * utf8_path) -> char **"""
return _gdal.ReadDirRecursive(*args)
def OpenDir(*args):
"""OpenDir(char const * utf8_path, int nRecurseDepth=-1, char ** options=None) -> VSIDIR *"""
return _gdal.OpenDir(*args)
class DirEntry(_object):
"""Proxy of C++ DirEntry class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, DirEntry, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, DirEntry, name)
__repr__ = _swig_repr
__swig_getmethods__["name"] = _gdal.DirEntry_name_get
if _newclass:
name = _swig_property(_gdal.DirEntry_name_get)
__swig_getmethods__["mode"] = _gdal.DirEntry_mode_get
if _newclass:
mode = _swig_property(_gdal.DirEntry_mode_get)
__swig_getmethods__["size"] = _gdal.DirEntry_size_get
if _newclass:
size = _swig_property(_gdal.DirEntry_size_get)
__swig_getmethods__["mtime"] = _gdal.DirEntry_mtime_get
if _newclass:
mtime = _swig_property(_gdal.DirEntry_mtime_get)
__swig_getmethods__["modeKnown"] = _gdal.DirEntry_modeKnown_get
if _newclass:
modeKnown = _swig_property(_gdal.DirEntry_modeKnown_get)
__swig_getmethods__["sizeKnown"] = _gdal.DirEntry_sizeKnown_get
if _newclass:
sizeKnown = _swig_property(_gdal.DirEntry_sizeKnown_get)
__swig_getmethods__["mtimeKnown"] = _gdal.DirEntry_mtimeKnown_get
if _newclass:
mtimeKnown = _swig_property(_gdal.DirEntry_mtimeKnown_get)
__swig_getmethods__["extra"] = _gdal.DirEntry_extra_get
if _newclass:
extra = _swig_property(_gdal.DirEntry_extra_get)
def __init__(self, *args):
"""__init__(DirEntry self, DirEntry entryIn) -> DirEntry"""
this = _gdal.new_DirEntry(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_DirEntry
__del__ = lambda self: None
def IsDirectory(self, *args):
"""IsDirectory(DirEntry self) -> bool"""
return _gdal.DirEntry_IsDirectory(self, *args)
DirEntry_swigregister = _gdal.DirEntry_swigregister
DirEntry_swigregister(DirEntry)
def GetNextDirEntry(*args):
"""GetNextDirEntry(VSIDIR * dir) -> DirEntry"""
return _gdal.GetNextDirEntry(*args)
def CloseDir(*args):
"""CloseDir(VSIDIR * dir)"""
return _gdal.CloseDir(*args)
def SetConfigOption(*args):
"""SetConfigOption(char const * pszKey, char const * pszValue)"""
return _gdal.SetConfigOption(*args)
def GetConfigOption(*args):
"""GetConfigOption(char const * pszKey, char const * pszDefault=None) -> char const *"""
return _gdal.GetConfigOption(*args)
def CPLBinaryToHex(*args):
"""CPLBinaryToHex(int nBytes) -> retStringAndCPLFree *"""
return _gdal.CPLBinaryToHex(*args)
def CPLHexToBinary(*args):
"""CPLHexToBinary(char const * pszHex, int * pnBytes) -> GByte *"""
return _gdal.CPLHexToBinary(*args)
def FileFromMemBuffer(*args):
"""FileFromMemBuffer(char const * utf8_path, GIntBig nBytes)"""
return _gdal.FileFromMemBuffer(*args)
def Unlink(*args):
"""Unlink(char const * utf8_path) -> VSI_RETVAL"""
return _gdal.Unlink(*args)
def UnlinkBatch(*args):
"""UnlinkBatch(char ** files) -> bool"""
return _gdal.UnlinkBatch(*args)
def HasThreadSupport(*args):
"""HasThreadSupport() -> int"""
return _gdal.HasThreadSupport(*args)
def Mkdir(*args):
"""Mkdir(char const * utf8_path, int mode) -> VSI_RETVAL"""
return _gdal.Mkdir(*args)
def Rmdir(*args):
"""Rmdir(char const * utf8_path) -> VSI_RETVAL"""
return _gdal.Rmdir(*args)
def MkdirRecursive(*args):
"""MkdirRecursive(char const * utf8_path, int mode) -> VSI_RETVAL"""
return _gdal.MkdirRecursive(*args)
def RmdirRecursive(*args):
"""RmdirRecursive(char const * utf8_path) -> VSI_RETVAL"""
return _gdal.RmdirRecursive(*args)
def Rename(*args):
"""Rename(char const * pszOld, char const * pszNew) -> VSI_RETVAL"""
return _gdal.Rename(*args)
def Sync(*args, **kwargs):
"""Sync(char const * pszSource, char const * pszTarget, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> bool"""
return _gdal.Sync(*args, **kwargs)
def GetActualURL(*args):
"""GetActualURL(char const * utf8_path) -> char const *"""
return _gdal.GetActualURL(*args)
def GetSignedURL(*args):
"""GetSignedURL(char const * utf8_path, char ** options=None) -> retStringAndCPLFree *"""
return _gdal.GetSignedURL(*args)
def GetFileSystemsPrefixes(*args):
"""GetFileSystemsPrefixes() -> char **"""
return _gdal.GetFileSystemsPrefixes(*args)
def GetFileSystemOptions(*args):
"""GetFileSystemOptions(char const * utf8_path) -> char const *"""
return _gdal.GetFileSystemOptions(*args)
class VSILFILE(_object):
"""Proxy of C++ VSILFILE class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VSILFILE, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VSILFILE, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
VSILFILE_swigregister = _gdal.VSILFILE_swigregister
VSILFILE_swigregister(VSILFILE)
VSI_STAT_EXISTS_FLAG = _gdal.VSI_STAT_EXISTS_FLAG
VSI_STAT_NATURE_FLAG = _gdal.VSI_STAT_NATURE_FLAG
VSI_STAT_SIZE_FLAG = _gdal.VSI_STAT_SIZE_FLAG
class StatBuf(_object):
"""Proxy of C++ StatBuf class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, StatBuf, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, StatBuf, name)
__repr__ = _swig_repr
__swig_getmethods__["mode"] = _gdal.StatBuf_mode_get
if _newclass:
mode = _swig_property(_gdal.StatBuf_mode_get)
__swig_getmethods__["size"] = _gdal.StatBuf_size_get
if _newclass:
size = _swig_property(_gdal.StatBuf_size_get)
__swig_getmethods__["mtime"] = _gdal.StatBuf_mtime_get
if _newclass:
mtime = _swig_property(_gdal.StatBuf_mtime_get)
def __init__(self, *args):
"""__init__(StatBuf self, StatBuf psStatBuf) -> StatBuf"""
this = _gdal.new_StatBuf(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_StatBuf
__del__ = lambda self: None
def IsDirectory(self, *args):
"""IsDirectory(StatBuf self) -> int"""
return _gdal.StatBuf_IsDirectory(self, *args)
StatBuf_swigregister = _gdal.StatBuf_swigregister
StatBuf_swigregister(StatBuf)
def VSIStatL(*args):
"""VSIStatL(char const * utf8_path, int nFlags=0) -> int"""
return _gdal.VSIStatL(*args)
def GetFileMetadata(*args):
"""GetFileMetadata(char const * utf8_path, char const * domain, char ** options=None) -> char **"""
return _gdal.GetFileMetadata(*args)
def SetFileMetadata(*args):
"""SetFileMetadata(char const * utf8_path, char ** metadata, char const * domain, char ** options=None) -> bool"""
return _gdal.SetFileMetadata(*args)
def VSIFOpenL(*args):
"""VSIFOpenL(char const * utf8_path, char const * pszMode) -> VSILFILE"""
return _gdal.VSIFOpenL(*args)
def VSIFOpenExL(*args):
"""VSIFOpenExL(char const * utf8_path, char const * pszMode, int bSetError=False, char ** options=None) -> VSILFILE"""
return _gdal.VSIFOpenExL(*args)
def VSIFEofL(*args):
"""VSIFEofL(VSILFILE fp) -> int"""
return _gdal.VSIFEofL(*args)
def VSIFFlushL(*args):
"""VSIFFlushL(VSILFILE fp) -> int"""
return _gdal.VSIFFlushL(*args)
def VSIFCloseL(*args):
"""VSIFCloseL(VSILFILE fp) -> VSI_RETVAL"""
return _gdal.VSIFCloseL(*args)
def VSIFSeekL(*args):
"""VSIFSeekL(VSILFILE fp, GIntBig offset, int whence) -> int"""
return _gdal.VSIFSeekL(*args)
def VSIFTellL(*args):
"""VSIFTellL(VSILFILE fp) -> GIntBig"""
return _gdal.VSIFTellL(*args)
def VSIFTruncateL(*args):
"""VSIFTruncateL(VSILFILE fp, GIntBig length) -> int"""
return _gdal.VSIFTruncateL(*args)
def VSISupportsSparseFiles(*args):
"""VSISupportsSparseFiles(char const * utf8_path) -> int"""
return _gdal.VSISupportsSparseFiles(*args)
VSI_RANGE_STATUS_UNKNOWN = _gdal.VSI_RANGE_STATUS_UNKNOWN
VSI_RANGE_STATUS_DATA = _gdal.VSI_RANGE_STATUS_DATA
VSI_RANGE_STATUS_HOLE = _gdal.VSI_RANGE_STATUS_HOLE
def VSIFGetRangeStatusL(*args):
"""VSIFGetRangeStatusL(VSILFILE fp, GIntBig offset, GIntBig length) -> int"""
return _gdal.VSIFGetRangeStatusL(*args)
def VSIFWriteL(*args):
"""VSIFWriteL(int nLen, int size, int memb, VSILFILE fp) -> int"""
return _gdal.VSIFWriteL(*args)
def VSICurlClearCache(*args):
"""VSICurlClearCache()"""
return _gdal.VSICurlClearCache(*args)
def VSICurlPartialClearCache(*args):
"""VSICurlPartialClearCache(char const * utf8_path)"""
return _gdal.VSICurlPartialClearCache(*args)
def NetworkStatsReset(*args):
"""NetworkStatsReset()"""
return _gdal.NetworkStatsReset(*args)
def NetworkStatsGetAsSerializedJSON(*args):
"""NetworkStatsGetAsSerializedJSON(char ** options=None) -> retStringAndCPLFree *"""
return _gdal.NetworkStatsGetAsSerializedJSON(*args)
def ParseCommandLine(*args):
"""ParseCommandLine(char const * utf8_path) -> char **"""
return _gdal.ParseCommandLine(*args)
class MajorObject(_object):
"""Proxy of C++ GDALMajorObjectShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, MajorObject, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, MajorObject, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
def GetDescription(self, *args):
"""GetDescription(MajorObject self) -> char const *"""
return _gdal.MajorObject_GetDescription(self, *args)
def SetDescription(self, *args):
"""SetDescription(MajorObject self, char const * pszNewDesc)"""
return _gdal.MajorObject_SetDescription(self, *args)
def GetMetadataDomainList(self, *args):
"""GetMetadataDomainList(MajorObject self) -> char **"""
return _gdal.MajorObject_GetMetadataDomainList(self, *args)
def GetMetadata_Dict(self, *args):
"""GetMetadata_Dict(MajorObject self, char const * pszDomain) -> char **"""
return _gdal.MajorObject_GetMetadata_Dict(self, *args)
def GetMetadata_List(self, *args):
"""GetMetadata_List(MajorObject self, char const * pszDomain) -> char **"""
return _gdal.MajorObject_GetMetadata_List(self, *args)
def SetMetadata(self, *args):
"""
SetMetadata(MajorObject self, char ** papszMetadata, char const * pszDomain) -> CPLErr
SetMetadata(MajorObject self, char * pszMetadataString, char const * pszDomain) -> CPLErr
"""
return _gdal.MajorObject_SetMetadata(self, *args)
def GetMetadataItem(self, *args):
"""GetMetadataItem(MajorObject self, char const * pszName, char const * pszDomain) -> char const *"""
return _gdal.MajorObject_GetMetadataItem(self, *args)
def SetMetadataItem(self, *args):
"""SetMetadataItem(MajorObject self, char const * pszName, char const * pszValue, char const * pszDomain) -> CPLErr"""
return _gdal.MajorObject_SetMetadataItem(self, *args)
def GetMetadata(self, domain=''):
if domain and domain[:4] == 'xml:':
return self.GetMetadata_List(domain)
return self.GetMetadata_Dict(domain)
MajorObject_swigregister = _gdal.MajorObject_swigregister
MajorObject_swigregister(MajorObject)
class Driver(MajorObject):
"""Proxy of C++ GDALDriverShadow class."""
__swig_setmethods__ = {}
for _s in [MajorObject]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Driver, name, value)
__swig_getmethods__ = {}
for _s in [MajorObject]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Driver, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_getmethods__["ShortName"] = _gdal.Driver_ShortName_get
if _newclass:
ShortName = _swig_property(_gdal.Driver_ShortName_get)
__swig_getmethods__["LongName"] = _gdal.Driver_LongName_get
if _newclass:
LongName = _swig_property(_gdal.Driver_LongName_get)
__swig_getmethods__["HelpTopic"] = _gdal.Driver_HelpTopic_get
if _newclass:
HelpTopic = _swig_property(_gdal.Driver_HelpTopic_get)
def Create(self, *args, **kwargs):
"""Create(Driver self, char const * utf8_path, int xsize, int ysize, int bands=1, GDALDataType eType, char ** options=None) -> Dataset"""
return _gdal.Driver_Create(self, *args, **kwargs)
def CreateMultiDimensional(self, *args, **kwargs):
"""CreateMultiDimensional(Driver self, char const * utf8_path, char ** root_group_options=None, char ** options=None) -> Dataset"""
return _gdal.Driver_CreateMultiDimensional(self, *args, **kwargs)
def CreateCopy(self, *args, **kwargs):
"""CreateCopy(Driver self, char const * utf8_path, Dataset src, int strict=1, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.Driver_CreateCopy(self, *args, **kwargs)
def Delete(self, *args):
"""Delete(Driver self, char const * utf8_path) -> CPLErr"""
return _gdal.Driver_Delete(self, *args)
def Rename(self, *args):
"""Rename(Driver self, char const * newName, char const * oldName) -> CPLErr"""
return _gdal.Driver_Rename(self, *args)
def CopyFiles(self, *args):
"""CopyFiles(Driver self, char const * newName, char const * oldName) -> CPLErr"""
return _gdal.Driver_CopyFiles(self, *args)
def Register(self, *args):
"""Register(Driver self) -> int"""
return _gdal.Driver_Register(self, *args)
def Deregister(self, *args):
"""Deregister(Driver self)"""
return _gdal.Driver_Deregister(self, *args)
Driver_swigregister = _gdal.Driver_swigregister
Driver_swigregister(Driver)
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
from . import ogr
else:
import ogr
del _swig_python_version_info
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
from . import osr
else:
import osr
del _swig_python_version_info
class ColorEntry(_object):
"""Proxy of C++ GDALColorEntry class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, ColorEntry, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, ColorEntry, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_setmethods__["c1"] = _gdal.ColorEntry_c1_set
__swig_getmethods__["c1"] = _gdal.ColorEntry_c1_get
if _newclass:
c1 = _swig_property(_gdal.ColorEntry_c1_get, _gdal.ColorEntry_c1_set)
__swig_setmethods__["c2"] = _gdal.ColorEntry_c2_set
__swig_getmethods__["c2"] = _gdal.ColorEntry_c2_get
if _newclass:
c2 = _swig_property(_gdal.ColorEntry_c2_get, _gdal.ColorEntry_c2_set)
__swig_setmethods__["c3"] = _gdal.ColorEntry_c3_set
__swig_getmethods__["c3"] = _gdal.ColorEntry_c3_get
if _newclass:
c3 = _swig_property(_gdal.ColorEntry_c3_get, _gdal.ColorEntry_c3_set)
__swig_setmethods__["c4"] = _gdal.ColorEntry_c4_set
__swig_getmethods__["c4"] = _gdal.ColorEntry_c4_get
if _newclass:
c4 = _swig_property(_gdal.ColorEntry_c4_get, _gdal.ColorEntry_c4_set)
ColorEntry_swigregister = _gdal.ColorEntry_swigregister
ColorEntry_swigregister(ColorEntry)
class GCP(_object):
"""Proxy of C++ GDAL_GCP class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GCP, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GCP, name)
__repr__ = _swig_repr
__swig_setmethods__["GCPX"] = _gdal.GCP_GCPX_set
__swig_getmethods__["GCPX"] = _gdal.GCP_GCPX_get
if _newclass:
GCPX = _swig_property(_gdal.GCP_GCPX_get, _gdal.GCP_GCPX_set)
__swig_setmethods__["GCPY"] = _gdal.GCP_GCPY_set
__swig_getmethods__["GCPY"] = _gdal.GCP_GCPY_get
if _newclass:
GCPY = _swig_property(_gdal.GCP_GCPY_get, _gdal.GCP_GCPY_set)
__swig_setmethods__["GCPZ"] = _gdal.GCP_GCPZ_set
__swig_getmethods__["GCPZ"] = _gdal.GCP_GCPZ_get
if _newclass:
GCPZ = _swig_property(_gdal.GCP_GCPZ_get, _gdal.GCP_GCPZ_set)
__swig_setmethods__["GCPPixel"] = _gdal.GCP_GCPPixel_set
__swig_getmethods__["GCPPixel"] = _gdal.GCP_GCPPixel_get
if _newclass:
GCPPixel = _swig_property(_gdal.GCP_GCPPixel_get, _gdal.GCP_GCPPixel_set)
__swig_setmethods__["GCPLine"] = _gdal.GCP_GCPLine_set
__swig_getmethods__["GCPLine"] = _gdal.GCP_GCPLine_get
if _newclass:
GCPLine = _swig_property(_gdal.GCP_GCPLine_get, _gdal.GCP_GCPLine_set)
__swig_setmethods__["Info"] = _gdal.GCP_Info_set
__swig_getmethods__["Info"] = _gdal.GCP_Info_get
if _newclass:
Info = _swig_property(_gdal.GCP_Info_get, _gdal.GCP_Info_set)
__swig_setmethods__["Id"] = _gdal.GCP_Id_set
__swig_getmethods__["Id"] = _gdal.GCP_Id_get
if _newclass:
Id = _swig_property(_gdal.GCP_Id_get, _gdal.GCP_Id_set)
def __init__(self, *args):
"""__init__(GDAL_GCP self, double x=0.0, double y=0.0, double z=0.0, double pixel=0.0, double line=0.0, char const * info, char const * id) -> GCP"""
this = _gdal.new_GCP(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GCP
__del__ = lambda self: None
def __str__(self):
str = '%s (%.2fP,%.2fL) -> (%.7fE,%.7fN,%.2f) %s '\
% (self.Id, self.GCPPixel, self.GCPLine,
self.GCPX, self.GCPY, self.GCPZ, self.Info )
return str
def serialize(self, with_Z=0):
base = [gdalconst.CXT_Element,'GCP']
base.append([gdalconst.CXT_Attribute,'Id',[gdalconst.CXT_Text,self.Id]])
pixval = '%0.15E' % self.GCPPixel
lineval = '%0.15E' % self.GCPLine
xval = '%0.15E' % self.GCPX
yval = '%0.15E' % self.GCPY
zval = '%0.15E' % self.GCPZ
base.append([gdalconst.CXT_Attribute,'Pixel',[gdalconst.CXT_Text,pixval]])
base.append([gdalconst.CXT_Attribute,'Line',[gdalconst.CXT_Text,lineval]])
base.append([gdalconst.CXT_Attribute,'X',[gdalconst.CXT_Text,xval]])
base.append([gdalconst.CXT_Attribute,'Y',[gdalconst.CXT_Text,yval]])
if with_Z:
base.append([gdalconst.CXT_Attribute,'Z',[gdalconst.CXT_Text,zval]])
return base
GCP_swigregister = _gdal.GCP_swigregister
GCP_swigregister(GCP)
def GDAL_GCP_GCPX_get(*args):
"""GDAL_GCP_GCPX_get(GCP gcp) -> double"""
return _gdal.GDAL_GCP_GCPX_get(*args)
def GDAL_GCP_GCPX_set(*args):
"""GDAL_GCP_GCPX_set(GCP gcp, double dfGCPX)"""
return _gdal.GDAL_GCP_GCPX_set(*args)
def GDAL_GCP_GCPY_get(*args):
"""GDAL_GCP_GCPY_get(GCP gcp) -> double"""
return _gdal.GDAL_GCP_GCPY_get(*args)
def GDAL_GCP_GCPY_set(*args):
"""GDAL_GCP_GCPY_set(GCP gcp, double dfGCPY)"""
return _gdal.GDAL_GCP_GCPY_set(*args)
def GDAL_GCP_GCPZ_get(*args):
"""GDAL_GCP_GCPZ_get(GCP gcp) -> double"""
return _gdal.GDAL_GCP_GCPZ_get(*args)
def GDAL_GCP_GCPZ_set(*args):
"""GDAL_GCP_GCPZ_set(GCP gcp, double dfGCPZ)"""
return _gdal.GDAL_GCP_GCPZ_set(*args)
def GDAL_GCP_GCPPixel_get(*args):
"""GDAL_GCP_GCPPixel_get(GCP gcp) -> double"""
return _gdal.GDAL_GCP_GCPPixel_get(*args)
def GDAL_GCP_GCPPixel_set(*args):
"""GDAL_GCP_GCPPixel_set(GCP gcp, double dfGCPPixel)"""
return _gdal.GDAL_GCP_GCPPixel_set(*args)
def GDAL_GCP_GCPLine_get(*args):
"""GDAL_GCP_GCPLine_get(GCP gcp) -> double"""
return _gdal.GDAL_GCP_GCPLine_get(*args)
def GDAL_GCP_GCPLine_set(*args):
"""GDAL_GCP_GCPLine_set(GCP gcp, double dfGCPLine)"""
return _gdal.GDAL_GCP_GCPLine_set(*args)
def GDAL_GCP_Info_get(*args):
"""GDAL_GCP_Info_get(GCP gcp) -> char const *"""
return _gdal.GDAL_GCP_Info_get(*args)
def GDAL_GCP_Info_set(*args):
"""GDAL_GCP_Info_set(GCP gcp, char const * pszInfo)"""
return _gdal.GDAL_GCP_Info_set(*args)
def GDAL_GCP_Id_get(*args):
"""GDAL_GCP_Id_get(GCP gcp) -> char const *"""
return _gdal.GDAL_GCP_Id_get(*args)
def GDAL_GCP_Id_set(*args):
"""GDAL_GCP_Id_set(GCP gcp, char const * pszId)"""
return _gdal.GDAL_GCP_Id_set(*args)
def GCPsToGeoTransform(*args):
"""GCPsToGeoTransform(int nGCPs, int bApproxOK=1) -> RETURN_NONE"""
return _gdal.GCPsToGeoTransform(*args)
class VirtualMem(_object):
"""Proxy of C++ CPLVirtualMemShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VirtualMem, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VirtualMem, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_VirtualMem
__del__ = lambda self: None
def GetAddr(self, *args):
"""GetAddr(VirtualMem self)"""
return _gdal.VirtualMem_GetAddr(self, *args)
def Pin(self, *args):
"""Pin(VirtualMem self, size_t start_offset=0, size_t nsize=0, int bWriteOp=0)"""
return _gdal.VirtualMem_Pin(self, *args)
VirtualMem_swigregister = _gdal.VirtualMem_swigregister
VirtualMem_swigregister(VirtualMem)
class AsyncReader(_object):
"""Proxy of C++ GDALAsyncReaderShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, AsyncReader, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, AsyncReader, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_AsyncReader
__del__ = lambda self: None
def GetNextUpdatedRegion(self, *args):
"""GetNextUpdatedRegion(AsyncReader self, double timeout) -> GDALAsyncStatusType"""
return _gdal.AsyncReader_GetNextUpdatedRegion(self, *args)
def GetBuffer(self, *args):
"""GetBuffer(AsyncReader self)"""
return _gdal.AsyncReader_GetBuffer(self, *args)
def LockBuffer(self, *args):
"""LockBuffer(AsyncReader self, double timeout) -> int"""
return _gdal.AsyncReader_LockBuffer(self, *args)
def UnlockBuffer(self, *args):
"""UnlockBuffer(AsyncReader self)"""
return _gdal.AsyncReader_UnlockBuffer(self, *args)
AsyncReader_swigregister = _gdal.AsyncReader_swigregister
AsyncReader_swigregister(AsyncReader)
class Dataset(MajorObject):
"""Proxy of C++ GDALDatasetShadow class."""
__swig_setmethods__ = {}
for _s in [MajorObject]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Dataset, name, value)
__swig_getmethods__ = {}
for _s in [MajorObject]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Dataset, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_getmethods__["RasterXSize"] = _gdal.Dataset_RasterXSize_get
if _newclass:
RasterXSize = _swig_property(_gdal.Dataset_RasterXSize_get)
__swig_getmethods__["RasterYSize"] = _gdal.Dataset_RasterYSize_get
if _newclass:
RasterYSize = _swig_property(_gdal.Dataset_RasterYSize_get)
__swig_getmethods__["RasterCount"] = _gdal.Dataset_RasterCount_get
if _newclass:
RasterCount = _swig_property(_gdal.Dataset_RasterCount_get)
__swig_destroy__ = _gdal.delete_Dataset
__del__ = lambda self: None
def GetDriver(self, *args):
"""GetDriver(Dataset self) -> Driver"""
return _gdal.Dataset_GetDriver(self, *args)
def GetRasterBand(self, *args):
"""GetRasterBand(Dataset self, int nBand) -> Band"""
return _gdal.Dataset_GetRasterBand(self, *args)
def GetRootGroup(self, *args):
"""GetRootGroup(Dataset self) -> Group"""
return _gdal.Dataset_GetRootGroup(self, *args)
def GetProjection(self, *args):
"""GetProjection(Dataset self) -> char const *"""
return _gdal.Dataset_GetProjection(self, *args)
def GetProjectionRef(self, *args):
"""GetProjectionRef(Dataset self) -> char const *"""
return _gdal.Dataset_GetProjectionRef(self, *args)
def GetSpatialRef(self, *args):
"""GetSpatialRef(Dataset self) -> SpatialReference"""
return _gdal.Dataset_GetSpatialRef(self, *args)
def SetProjection(self, *args):
"""SetProjection(Dataset self, char const * prj) -> CPLErr"""
return _gdal.Dataset_SetProjection(self, *args)
def SetSpatialRef(self, *args):
"""SetSpatialRef(Dataset self, SpatialReference srs) -> CPLErr"""
return _gdal.Dataset_SetSpatialRef(self, *args)
def GetGeoTransform(self, *args, **kwargs):
"""GetGeoTransform(Dataset self, int * can_return_null=None)"""
return _gdal.Dataset_GetGeoTransform(self, *args, **kwargs)
def SetGeoTransform(self, *args):
"""SetGeoTransform(Dataset self, double [6] argin) -> CPLErr"""
return _gdal.Dataset_SetGeoTransform(self, *args)
def BuildOverviews(self, *args, **kwargs):
"""BuildOverviews(Dataset self, char const * resampling, int overviewlist=0, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.Dataset_BuildOverviews(self, *args, **kwargs)
def GetGCPCount(self, *args):
"""GetGCPCount(Dataset self) -> int"""
return _gdal.Dataset_GetGCPCount(self, *args)
def GetGCPProjection(self, *args):
"""GetGCPProjection(Dataset self) -> char const *"""
return _gdal.Dataset_GetGCPProjection(self, *args)
def GetGCPSpatialRef(self, *args):
"""GetGCPSpatialRef(Dataset self) -> SpatialReference"""
return _gdal.Dataset_GetGCPSpatialRef(self, *args)
def GetGCPs(self, *args):
"""GetGCPs(Dataset self)"""
return _gdal.Dataset_GetGCPs(self, *args)
def _SetGCPs(self, *args):
"""_SetGCPs(Dataset self, int nGCPs, char const * pszGCPProjection) -> CPLErr"""
return _gdal.Dataset__SetGCPs(self, *args)
def _SetGCPs2(self, *args):
"""_SetGCPs2(Dataset self, int nGCPs, SpatialReference hSRS) -> CPLErr"""
return _gdal.Dataset__SetGCPs2(self, *args)
def FlushCache(self, *args):
"""FlushCache(Dataset self)"""
return _gdal.Dataset_FlushCache(self, *args)
def AddBand(self, *args, **kwargs):
"""AddBand(Dataset self, GDALDataType datatype, char ** options=None) -> CPLErr"""
return _gdal.Dataset_AddBand(self, *args, **kwargs)
def CreateMaskBand(self, *args):
"""CreateMaskBand(Dataset self, int nFlags) -> CPLErr"""
return _gdal.Dataset_CreateMaskBand(self, *args)
def GetFileList(self, *args):
"""GetFileList(Dataset self) -> char **"""
return _gdal.Dataset_GetFileList(self, *args)
def WriteRaster(self, *args, **kwargs):
"""WriteRaster(Dataset self, int xoff, int yoff, int xsize, int ysize, GIntBig buf_len, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GIntBig * buf_band_space=None) -> CPLErr"""
return _gdal.Dataset_WriteRaster(self, *args, **kwargs)
def AdviseRead(self, *args):
"""AdviseRead(Dataset self, int xoff, int yoff, int xsize, int ysize, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, char ** options=None) -> CPLErr"""
return _gdal.Dataset_AdviseRead(self, *args)
def BeginAsyncReader(self, *args, **kwargs):
"""BeginAsyncReader(Dataset self, int xOff, int yOff, int xSize, int ySize, int buf_len, int buf_xsize, int buf_ysize, GDALDataType bufType, int band_list=0, int nPixelSpace=0, int nLineSpace=0, int nBandSpace=0, char ** options=None) -> AsyncReader"""
return _gdal.Dataset_BeginAsyncReader(self, *args, **kwargs)
def EndAsyncReader(self, *args):
"""EndAsyncReader(Dataset self, AsyncReader ario)"""
return _gdal.Dataset_EndAsyncReader(self, *args)
def GetVirtualMem(self, *args, **kwargs):
"""GetVirtualMem(Dataset self, GDALRWFlag eRWFlag, int nXOff, int nYOff, int nXSize, int nYSize, int nBufXSize, int nBufYSize, GDALDataType eBufType, int band_list, int bIsBandSequential, size_t nCacheSize, size_t nPageSizeHint, char ** options=None) -> VirtualMem"""
return _gdal.Dataset_GetVirtualMem(self, *args, **kwargs)
def GetTiledVirtualMem(self, *args, **kwargs):
"""GetTiledVirtualMem(Dataset self, GDALRWFlag eRWFlag, int nXOff, int nYOff, int nXSize, int nYSize, int nTileXSize, int nTileYSize, GDALDataType eBufType, int band_list, GDALTileOrganization eTileOrganization, size_t nCacheSize, char ** options=None) -> VirtualMem"""
return _gdal.Dataset_GetTiledVirtualMem(self, *args, **kwargs)
def CreateLayer(self, *args, **kwargs):
"""CreateLayer(Dataset self, char const * name, SpatialReference srs=None, OGRwkbGeometryType geom_type, char ** options=None) -> Layer"""
return _gdal.Dataset_CreateLayer(self, *args, **kwargs)
def CopyLayer(self, *args, **kwargs):
"""CopyLayer(Dataset self, Layer src_layer, char const * new_name, char ** options=None) -> Layer"""
return _gdal.Dataset_CopyLayer(self, *args, **kwargs)
def DeleteLayer(self, *args):
"""DeleteLayer(Dataset self, int index) -> OGRErr"""
return _gdal.Dataset_DeleteLayer(self, *args)
def GetLayerCount(self, *args):
"""GetLayerCount(Dataset self) -> int"""
return _gdal.Dataset_GetLayerCount(self, *args)
def GetLayerByIndex(self, *args):
"""GetLayerByIndex(Dataset self, int index=0) -> Layer"""
return _gdal.Dataset_GetLayerByIndex(self, *args)
def GetLayerByName(self, *args):
"""GetLayerByName(Dataset self, char const * layer_name) -> Layer"""
return _gdal.Dataset_GetLayerByName(self, *args)
def ResetReading(self, *args):
"""ResetReading(Dataset self)"""
return _gdal.Dataset_ResetReading(self, *args)
def GetNextFeature(self, *args, **kwargs):
"""GetNextFeature(Dataset self, bool include_layer=True, bool include_pct=False, GDALProgressFunc callback=0, void * callback_data=None) -> Feature"""
return _gdal.Dataset_GetNextFeature(self, *args, **kwargs)
def TestCapability(self, *args):
"""TestCapability(Dataset self, char const * cap) -> bool"""
return _gdal.Dataset_TestCapability(self, *args)
def ExecuteSQL(self, *args, **kwargs):
"""ExecuteSQL(Dataset self, char const * statement, Geometry spatialFilter=None, char const * dialect) -> Layer"""
return _gdal.Dataset_ExecuteSQL(self, *args, **kwargs)
def ReleaseResultSet(self, *args):
"""ReleaseResultSet(Dataset self, Layer layer)"""
return _gdal.Dataset_ReleaseResultSet(self, *args)
def GetStyleTable(self, *args):
"""GetStyleTable(Dataset self) -> StyleTable"""
return _gdal.Dataset_GetStyleTable(self, *args)
def SetStyleTable(self, *args):
"""SetStyleTable(Dataset self, StyleTable table)"""
return _gdal.Dataset_SetStyleTable(self, *args)
def AbortSQL(self, *args):
"""AbortSQL(Dataset self) -> OGRErr"""
return _gdal.Dataset_AbortSQL(self, *args)
def StartTransaction(self, *args, **kwargs):
"""StartTransaction(Dataset self, int force=False) -> OGRErr"""
return _gdal.Dataset_StartTransaction(self, *args, **kwargs)
def CommitTransaction(self, *args):
"""CommitTransaction(Dataset self) -> OGRErr"""
return _gdal.Dataset_CommitTransaction(self, *args)
def RollbackTransaction(self, *args):
"""RollbackTransaction(Dataset self) -> OGRErr"""
return _gdal.Dataset_RollbackTransaction(self, *args)
def ClearStatistics(self, *args):
"""ClearStatistics(Dataset self)"""
return _gdal.Dataset_ClearStatistics(self, *args)
def GetFieldDomain(self, *args):
"""GetFieldDomain(Dataset self, char const * name) -> FieldDomain"""
return _gdal.Dataset_GetFieldDomain(self, *args)
def AddFieldDomain(self, *args):
"""AddFieldDomain(Dataset self, FieldDomain fieldDomain) -> bool"""
return _gdal.Dataset_AddFieldDomain(self, *args)
def ReadRaster1(self, *args, **kwargs):
"""ReadRaster1(Dataset self, double xoff, double yoff, double xsize, double ysize, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GIntBig * buf_band_space=None, GDALRIOResampleAlg resample_alg, GDALProgressFunc callback=0, void * callback_data=None, void * inputOutputBuf=None) -> CPLErr"""
return _gdal.Dataset_ReadRaster1(self, *args, **kwargs)
def ReadAsArray(self, xoff=0, yoff=0, xsize=None, ysize=None, buf_obj=None,
buf_xsize=None, buf_ysize=None, buf_type=None,
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None,
interleave='band',
band_list=None):
""" Reading a chunk of a GDAL band into a numpy array. The optional (buf_xsize,buf_ysize,buf_type)
parameters should generally not be specified if buf_obj is specified. The array is returned"""
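        # Usage sketch (illustrative; 'example.tif' is a hypothetical file):
        #   ds = gdal.Open('example.tif')
        #   arr = ds.ReadAsArray()  # typically 2D for a single band, 3D (band, row, col) otherwise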
from osgeo import gdal_array
return gdal_array.DatasetReadAsArray(self, xoff, yoff, xsize, ysize, buf_obj,
buf_xsize, buf_ysize, buf_type,
resample_alg=resample_alg,
callback=callback,
callback_data=callback_data,
interleave=interleave,
band_list=band_list)
def WriteArray(self, array, xoff=0, yoff=0,
band_list=None,
interleave='band',
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None):
from osgeo import gdal_array
return gdal_array.DatasetWriteArray(self, array, xoff, yoff,
band_list=band_list,
interleave=interleave,
resample_alg=resample_alg,
callback=callback,
callback_data=callback_data)
def WriteRaster(self, xoff, yoff, xsize, ysize,
buf_string,
buf_xsize=None, buf_ysize=None, buf_type=None,
band_list=None,
buf_pixel_space=None, buf_line_space=None, buf_band_space=None ):
if buf_xsize is None:
buf_xsize = xsize
if buf_ysize is None:
buf_ysize = ysize
if band_list is None:
band_list = list(range(1, self.RasterCount + 1))
# Redirect to numpy-friendly WriteArray() if buf_string is a numpy array
# and other arguments are compatible
if type(buf_string).__name__ == 'ndarray' and \
buf_xsize == xsize and buf_ysize == ysize and buf_type is None and \
buf_pixel_space is None and buf_line_space is None and buf_band_space is None:
return self.WriteArray(buf_string, xoff=xoff, yoff=yoff,
band_list=band_list)
if buf_type is None:
buf_type = self.GetRasterBand(1).DataType
return _gdal.Dataset_WriteRaster(self,
xoff, yoff, xsize, ysize,
buf_string, buf_xsize, buf_ysize, buf_type, band_list,
buf_pixel_space, buf_line_space, buf_band_space )
def ReadRaster(self, xoff=0, yoff=0, xsize=None, ysize=None,
buf_xsize=None, buf_ysize=None, buf_type=None,
band_list=None,
buf_pixel_space=None, buf_line_space=None, buf_band_space=None,
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None,
buf_obj=None):
if xsize is None:
xsize = self.RasterXSize
if ysize is None:
ysize = self.RasterYSize
if band_list is None:
band_list = list(range(1, self.RasterCount + 1))
if buf_xsize is None:
buf_xsize = xsize
if buf_ysize is None:
buf_ysize = ysize
if buf_type is None:
            buf_type = self.GetRasterBand(1).DataType
return _gdal.Dataset_ReadRaster1(self, xoff, yoff, xsize, ysize,
buf_xsize, buf_ysize, buf_type,
band_list, buf_pixel_space, buf_line_space, buf_band_space,
resample_alg, callback, callback_data, buf_obj )
def GetVirtualMemArray(self, eAccess=gdalconst.GF_Read, xoff=0, yoff=0,
xsize=None, ysize=None, bufxsize=None, bufysize=None,
datatype=None, band_list=None, band_sequential = True,
cache_size = 10 * 1024 * 1024, page_size_hint = 0,
options=None):
"""Return a NumPy array for the dataset, seen as a virtual memory mapping.
If there are several bands and band_sequential = True, an element is
accessed with array[band][y][x].
If there are several bands and band_sequential = False, an element is
accessed with array[y][x][band].
If there is only one band, an element is accessed with array[y][x].
Any reference to the array must be dropped before the last reference to the
related dataset is also dropped.
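        Example (illustrative sketch; requires a platform and driver combination that
        supports virtual memory mapping, e.g. Linux with a suitable file format):
            ar = ds.GetVirtualMemArray()
            v = ar[0][0][0]  # first band, first row, first column of a multi-band dataset
            del ar           # drop the array reference before the dataset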
"""
from osgeo import gdal_array
if xsize is None:
xsize = self.RasterXSize
if ysize is None:
ysize = self.RasterYSize
if bufxsize is None:
bufxsize = self.RasterXSize
if bufysize is None:
bufysize = self.RasterYSize
if datatype is None:
datatype = self.GetRasterBand(1).DataType
if band_list is None:
band_list = list(range(1, self.RasterCount + 1))
if options is None:
virtualmem = self.GetVirtualMem(eAccess, xoff, yoff, xsize, ysize, bufxsize, bufysize, datatype, band_list, band_sequential, cache_size, page_size_hint)
else:
virtualmem = self.GetVirtualMem(eAccess, xoff, yoff, xsize, ysize, bufxsize, bufysize, datatype, band_list, band_sequential, cache_size, page_size_hint, options)
return gdal_array.VirtualMemGetArray( virtualmem )
def GetTiledVirtualMemArray(self, eAccess=gdalconst.GF_Read, xoff=0, yoff=0,
xsize=None, ysize=None, tilexsize=256, tileysize=256,
datatype=None, band_list=None, tile_organization=gdalconst.GTO_BSQ,
cache_size = 10 * 1024 * 1024, options=None):
"""Return a NumPy array for the dataset, seen as a virtual memory mapping with
a tile organization.
If there are several bands and tile_organization = gdal.GTO_TIP, an element is
accessed with array[tiley][tilex][y][x][band].
If there are several bands and tile_organization = gdal.GTO_BIT, an element is
accessed with array[tiley][tilex][band][y][x].
If there are several bands and tile_organization = gdal.GTO_BSQ, an element is
accessed with array[band][tiley][tilex][y][x].
If there is only one band, an element is accessed with array[tiley][tilex][y][x].
Any reference to the array must be dropped before the last reference to the
related dataset is also dropped.
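        Example (illustrative sketch; same platform requirements as GetVirtualMemArray):
            ar = ds.GetTiledVirtualMemArray(tilexsize=256, tileysize=256)
            tile = ar[0][0][0]  # band 0, first tile row/column with the default GTO_BSQ layout
            del ar              # drop the array reference before the dataset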
"""
from osgeo import gdal_array
if xsize is None:
xsize = self.RasterXSize
if ysize is None:
ysize = self.RasterYSize
if datatype is None:
datatype = self.GetRasterBand(1).DataType
if band_list is None:
band_list = list(range(1, self.RasterCount + 1))
if options is None:
virtualmem = self.GetTiledVirtualMem(eAccess,xoff,yoff,xsize,ysize,tilexsize,tileysize,datatype,band_list,tile_organization,cache_size)
else:
virtualmem = self.GetTiledVirtualMem(eAccess,xoff,yoff,xsize,ysize,tilexsize,tileysize,datatype,band_list,tile_organization,cache_size, options)
return gdal_array.VirtualMemGetArray( virtualmem )
def GetSubDatasets(self):
sd_list = []
sd = self.GetMetadata('SUBDATASETS')
if sd is None:
return sd_list
i = 1
while 'SUBDATASET_'+str(i)+'_NAME' in sd:
sd_list.append((sd['SUBDATASET_'+str(i)+'_NAME'],
sd['SUBDATASET_'+str(i)+'_DESC']))
i = i + 1
return sd_list
def BeginAsyncReader(self, xoff, yoff, xsize, ysize, buf_obj=None, buf_xsize=None, buf_ysize=None, buf_type=None, band_list=None, options=None):
if band_list is None:
band_list = list(range(1, self.RasterCount + 1))
if buf_xsize is None:
buf_xsize = 0;
if buf_ysize is None:
buf_ysize = 0;
if buf_type is None:
buf_type = gdalconst.GDT_Byte
if buf_xsize <= 0:
buf_xsize = xsize
if buf_ysize <= 0:
buf_ysize = ysize
options = [] if options is None else options
if buf_obj is None:
from sys import version_info
nRequiredSize = int(buf_xsize * buf_ysize * len(band_list) * (_gdal.GetDataTypeSize(buf_type) / 8))
if version_info >= (3, 0, 0):
buf_obj_ar = [None]
exec("buf_obj_ar[0] = b' ' * nRequiredSize")
buf_obj = buf_obj_ar[0]
else:
buf_obj = ' ' * nRequiredSize
return _gdal.Dataset_BeginAsyncReader(self, xoff, yoff, xsize, ysize, buf_obj, buf_xsize, buf_ysize, buf_type, band_list, 0, 0, 0, options)
def GetLayer(self, iLayer=0):
"""Return the layer given an index or a name"""
if isinstance(iLayer, str):
return self.GetLayerByName(str(iLayer))
elif isinstance(iLayer, int):
return self.GetLayerByIndex(iLayer)
else:
raise TypeError("Input %s is not of String or Int type" % type(iLayer))
def DeleteLayer(self, value):
"""Deletes the layer given an index or layer name"""
if isinstance(value, str):
for i in range(self.GetLayerCount()):
name = self.GetLayer(i).GetName()
if name == value:
return _gdal.Dataset_DeleteLayer(self, i)
raise ValueError("Layer %s not found to delete" % value)
elif isinstance(value, int):
return _gdal.Dataset_DeleteLayer(self, value)
else:
raise TypeError("Input %s is not of String or Int type" % type(value))
def SetGCPs(self, gcps, wkt_or_spatial_ref):
if isinstance(wkt_or_spatial_ref, str):
return self._SetGCPs(gcps, wkt_or_spatial_ref)
else:
return self._SetGCPs2(gcps, wkt_or_spatial_ref)
Dataset_swigregister = _gdal.Dataset_swigregister
Dataset_swigregister(Dataset)
class Group(_object):
"""Proxy of C++ GDALGroupHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Group, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Group, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_Group
__del__ = lambda self: None
def GetName(self, *args):
"""GetName(Group self) -> char const *"""
return _gdal.Group_GetName(self, *args)
def GetFullName(self, *args):
"""GetFullName(Group self) -> char const *"""
return _gdal.Group_GetFullName(self, *args)
def GetMDArrayNames(self, *args):
"""GetMDArrayNames(Group self, char ** options=None) -> char **"""
return _gdal.Group_GetMDArrayNames(self, *args)
def OpenMDArray(self, *args):
"""OpenMDArray(Group self, char const * name, char ** options=None) -> MDArray"""
return _gdal.Group_OpenMDArray(self, *args)
def OpenMDArrayFromFullname(self, *args):
"""OpenMDArrayFromFullname(Group self, char const * name, char ** options=None) -> MDArray"""
return _gdal.Group_OpenMDArrayFromFullname(self, *args)
def ResolveMDArray(self, *args):
"""ResolveMDArray(Group self, char const * name, char const * starting_point, char ** options=None) -> MDArray"""
return _gdal.Group_ResolveMDArray(self, *args)
def GetGroupNames(self, *args):
"""GetGroupNames(Group self, char ** options=None) -> char **"""
return _gdal.Group_GetGroupNames(self, *args)
def OpenGroup(self, *args):
"""OpenGroup(Group self, char const * name, char ** options=None) -> Group"""
return _gdal.Group_OpenGroup(self, *args)
def OpenGroupFromFullname(self, *args):
"""OpenGroupFromFullname(Group self, char const * name, char ** options=None) -> Group"""
return _gdal.Group_OpenGroupFromFullname(self, *args)
def GetDimensions(self, *args):
"""GetDimensions(Group self, char ** options=None)"""
return _gdal.Group_GetDimensions(self, *args)
def GetAttribute(self, *args):
"""GetAttribute(Group self, char const * name) -> Attribute"""
return _gdal.Group_GetAttribute(self, *args)
def GetAttributes(self, *args):
"""GetAttributes(Group self, char ** options=None)"""
return _gdal.Group_GetAttributes(self, *args)
def GetStructuralInfo(self, *args):
"""GetStructuralInfo(Group self) -> char **"""
return _gdal.Group_GetStructuralInfo(self, *args)
def CreateGroup(self, *args):
"""CreateGroup(Group self, char const * name, char ** options=None) -> Group"""
return _gdal.Group_CreateGroup(self, *args)
def CreateDimension(self, *args):
"""CreateDimension(Group self, char const * name, char const * type, char const * direction, unsigned long long size, char ** options=None) -> Dimension"""
return _gdal.Group_CreateDimension(self, *args)
def CreateMDArray(self, *args):
"""CreateMDArray(Group self, char const * name, int nDimensions, ExtendedDataType data_type, char ** options=None) -> MDArray"""
return _gdal.Group_CreateMDArray(self, *args)
def CreateAttribute(self, *args):
"""CreateAttribute(Group self, char const * name, int nDimensions, ExtendedDataType data_type, char ** options=None) -> Attribute"""
return _gdal.Group_CreateAttribute(self, *args)
Group_swigregister = _gdal.Group_swigregister
Group_swigregister(Group)
class Statistics(_object):
"""Proxy of C++ Statistics class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Statistics, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Statistics, name)
__repr__ = _swig_repr
__swig_getmethods__["min"] = _gdal.Statistics_min_get
if _newclass:
min = _swig_property(_gdal.Statistics_min_get)
__swig_getmethods__["max"] = _gdal.Statistics_max_get
if _newclass:
max = _swig_property(_gdal.Statistics_max_get)
__swig_getmethods__["mean"] = _gdal.Statistics_mean_get
if _newclass:
mean = _swig_property(_gdal.Statistics_mean_get)
__swig_getmethods__["std_dev"] = _gdal.Statistics_std_dev_get
if _newclass:
std_dev = _swig_property(_gdal.Statistics_std_dev_get)
__swig_getmethods__["valid_count"] = _gdal.Statistics_valid_count_get
if _newclass:
valid_count = _swig_property(_gdal.Statistics_valid_count_get)
__swig_destroy__ = _gdal.delete_Statistics
__del__ = lambda self: None
def __init__(self, *args):
"""__init__(Statistics self) -> Statistics"""
this = _gdal.new_Statistics(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
Statistics_swigregister = _gdal.Statistics_swigregister
Statistics_swigregister(Statistics)
class MDArray(_object):
"""Proxy of C++ GDALMDArrayHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, MDArray, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, MDArray, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_MDArray
__del__ = lambda self: None
def GetName(self, *args):
"""GetName(MDArray self) -> char const *"""
return _gdal.MDArray_GetName(self, *args)
def GetFullName(self, *args):
"""GetFullName(MDArray self) -> char const *"""
return _gdal.MDArray_GetFullName(self, *args)
def GetTotalElementsCount(self, *args):
"""GetTotalElementsCount(MDArray self) -> unsigned long long"""
return _gdal.MDArray_GetTotalElementsCount(self, *args)
def GetDimensionCount(self, *args):
"""GetDimensionCount(MDArray self) -> size_t"""
return _gdal.MDArray_GetDimensionCount(self, *args)
def GetDimensions(self, *args):
"""GetDimensions(MDArray self)"""
return _gdal.MDArray_GetDimensions(self, *args)
def GetBlockSize(self, *args):
"""GetBlockSize(MDArray self)"""
return _gdal.MDArray_GetBlockSize(self, *args)
def GetProcessingChunkSize(self, *args):
"""GetProcessingChunkSize(MDArray self, size_t nMaxChunkMemory)"""
return _gdal.MDArray_GetProcessingChunkSize(self, *args)
def GetDataType(self, *args):
"""GetDataType(MDArray self) -> ExtendedDataType"""
return _gdal.MDArray_GetDataType(self, *args)
def GetStructuralInfo(self, *args):
"""GetStructuralInfo(MDArray self) -> char **"""
return _gdal.MDArray_GetStructuralInfo(self, *args)
def Read(self, *args):
"""Read(MDArray self, int nDims1, int nDims2, int nDims3, int nDims4, ExtendedDataType buffer_datatype) -> CPLErr"""
return _gdal.MDArray_Read(self, *args)
def WriteStringArray(self, *args):
"""WriteStringArray(MDArray self, int nDims1, int nDims2, int nDims3, ExtendedDataType buffer_datatype, char ** options) -> CPLErr"""
return _gdal.MDArray_WriteStringArray(self, *args)
def Write(self, *args):
"""Write(MDArray self, int nDims1, int nDims2, int nDims3, int nDims4, ExtendedDataType buffer_datatype, GIntBig buf_len) -> CPLErr"""
return _gdal.MDArray_Write(self, *args)
def AdviseRead(self, *args):
"""AdviseRead(MDArray self, int nDims1, int nDims2) -> CPLErr"""
return _gdal.MDArray_AdviseRead(self, *args)
def GetAttribute(self, *args):
"""GetAttribute(MDArray self, char const * name) -> Attribute"""
return _gdal.MDArray_GetAttribute(self, *args)
def GetAttributes(self, *args):
"""GetAttributes(MDArray self, char ** options=None)"""
return _gdal.MDArray_GetAttributes(self, *args)
def CreateAttribute(self, *args):
"""CreateAttribute(MDArray self, char const * name, int nDimensions, ExtendedDataType data_type, char ** options=None) -> Attribute"""
return _gdal.MDArray_CreateAttribute(self, *args)
def GetNoDataValueAsRaw(self, *args):
"""GetNoDataValueAsRaw(MDArray self) -> CPLErr"""
return _gdal.MDArray_GetNoDataValueAsRaw(self, *args)
def GetNoDataValueAsDouble(self, *args):
"""GetNoDataValueAsDouble(MDArray self)"""
return _gdal.MDArray_GetNoDataValueAsDouble(self, *args)
def SetNoDataValueDouble(self, *args):
"""SetNoDataValueDouble(MDArray self, double d) -> CPLErr"""
return _gdal.MDArray_SetNoDataValueDouble(self, *args)
def SetNoDataValueRaw(self, *args):
"""SetNoDataValueRaw(MDArray self, GIntBig nLen) -> CPLErr"""
return _gdal.MDArray_SetNoDataValueRaw(self, *args)
def DeleteNoDataValue(self, *args):
"""DeleteNoDataValue(MDArray self) -> CPLErr"""
return _gdal.MDArray_DeleteNoDataValue(self, *args)
def GetOffset(self, *args):
"""GetOffset(MDArray self)"""
return _gdal.MDArray_GetOffset(self, *args)
def GetOffsetStorageType(self, *args):
"""GetOffsetStorageType(MDArray self) -> GDALDataType"""
return _gdal.MDArray_GetOffsetStorageType(self, *args)
def GetScale(self, *args):
"""GetScale(MDArray self)"""
return _gdal.MDArray_GetScale(self, *args)
def GetScaleStorageType(self, *args):
"""GetScaleStorageType(MDArray self) -> GDALDataType"""
return _gdal.MDArray_GetScaleStorageType(self, *args)
def SetOffset(self, *args, **kwargs):
"""SetOffset(MDArray self, double val, GDALDataType storageType) -> CPLErr"""
return _gdal.MDArray_SetOffset(self, *args, **kwargs)
def SetScale(self, *args, **kwargs):
"""SetScale(MDArray self, double val, GDALDataType storageType) -> CPLErr"""
return _gdal.MDArray_SetScale(self, *args, **kwargs)
def SetUnit(self, *args):
"""SetUnit(MDArray self, char const * unit) -> CPLErr"""
return _gdal.MDArray_SetUnit(self, *args)
def GetUnit(self, *args):
"""GetUnit(MDArray self) -> char const *"""
return _gdal.MDArray_GetUnit(self, *args)
def SetSpatialRef(self, *args):
"""SetSpatialRef(MDArray self, SpatialReference srs) -> OGRErr"""
return _gdal.MDArray_SetSpatialRef(self, *args)
def GetSpatialRef(self, *args):
"""GetSpatialRef(MDArray self) -> SpatialReference"""
return _gdal.MDArray_GetSpatialRef(self, *args)
def GetView(self, *args):
"""GetView(MDArray self, char const * viewExpr) -> MDArray"""
return _gdal.MDArray_GetView(self, *args)
def Transpose(self, *args):
"""Transpose(MDArray self, int nList) -> MDArray"""
return _gdal.MDArray_Transpose(self, *args)
def GetUnscaled(self, *args):
"""GetUnscaled(MDArray self) -> MDArray"""
return _gdal.MDArray_GetUnscaled(self, *args)
def GetMask(self, *args):
"""GetMask(MDArray self, char ** options=None) -> MDArray"""
return _gdal.MDArray_GetMask(self, *args)
def AsClassicDataset(self, *args):
"""AsClassicDataset(MDArray self, size_t iXDim, size_t iYDim) -> Dataset"""
return _gdal.MDArray_AsClassicDataset(self, *args)
def GetStatistics(self, *args, **kwargs):
"""GetStatistics(MDArray self, Dataset ds=None, bool approx_ok=False, bool force=True, GDALProgressFunc callback=0, void * callback_data=None) -> Statistics"""
return _gdal.MDArray_GetStatistics(self, *args, **kwargs)
def ComputeStatistics(self, *args, **kwargs):
"""ComputeStatistics(MDArray self, Dataset ds=None, bool approx_ok=False, GDALProgressFunc callback=0, void * callback_data=None) -> Statistics"""
return _gdal.MDArray_ComputeStatistics(self, *args, **kwargs)
def Read(self,
array_start_idx = None,
count = None,
array_step = None,
buffer_stride = None,
buffer_datatype = None):
if not array_start_idx:
array_start_idx = [0] * self.GetDimensionCount()
if not count:
count = [ dim.GetSize() for dim in self.GetDimensions() ]
if not array_step:
array_step = [1] * self.GetDimensionCount()
if not buffer_stride:
stride = 1
buffer_stride = []
# To compute strides we must proceed from the fastest varying dimension
# (the last one), and then reverse the result
for cnt in reversed(count):
buffer_stride.append(stride)
stride *= cnt
buffer_stride.reverse()
if not buffer_datatype:
buffer_datatype = self.GetDataType()
return _gdal.MDArray_Read(self, array_start_idx, count, array_step, buffer_stride, buffer_datatype)
def ReadAsArray(self,
array_start_idx = None,
count = None,
array_step = None,
buffer_datatype = None,
buf_obj = None):
from osgeo import gdal_array
return gdal_array.MDArrayReadAsArray(self, array_start_idx, count, array_step, buffer_datatype, buf_obj)
def AdviseRead(self, array_start_idx = None, count = None):
if not array_start_idx:
array_start_idx = [0] * self.GetDimensionCount()
if not count:
count = [ (self.GetDimensions()[i].GetSize() - array_start_idx[i]) for i in range (self.GetDimensionCount()) ]
return _gdal.MDArray_AdviseRead(self, array_start_idx, count)
    def __getitem__(self, item):
        """ Return a view of the array built from a slicing expression: integers,
            slices, Ellipsis, numpy.newaxis, or a field name for compound data types """
def stringify(v):
if v == Ellipsis:
return '...'
if isinstance(v, slice):
return ':'.join([str(x) if x is not None else '' for x in (v.start, v.stop, v.step)])
if isinstance(v, str):
return v
if isinstance(v, (int, type(12345678901234))):
return str(v)
try:
import numpy as np
if v == np.newaxis:
return 'newaxis'
            except Exception:
                # numpy may be unavailable, or the comparison may raise for array-like values
                pass
return str(v)
if isinstance(item, str):
return self.GetView('["' + item.replace('\\', '\\\\').replace('"', '\\"') + '"]')
elif isinstance(item, slice):
return self.GetView('[' + stringify(item) + ']')
elif isinstance(item, tuple):
return self.GetView('[' + ','.join([stringify(x) for x in item]) + ']')
else:
return self.GetView('[' + stringify(item) + ']')
def Write(self,
buffer,
array_start_idx = None,
count = None,
array_step = None,
buffer_stride = None,
buffer_datatype = None):
dimCount = self.GetDimensionCount()
# Redirect to numpy-friendly WriteArray() if buffer is a numpy array
# and other arguments are compatible
if type(buffer).__name__ == 'ndarray' and \
count is None and buffer_stride is None and buffer_datatype is None:
return self.WriteArray(buffer, array_start_idx=array_start_idx, array_step=array_step)
# Special case for buffer of type array and 1D arrays
if dimCount == 1 and type(buffer).__name__ == 'array' and \
count is None and buffer_stride is None and buffer_datatype is None:
map_typecode_itemsize_to_gdal = {
('B',1): GDT_Byte,
('h',2): GDT_Int16,
('H',2): GDT_UInt16,
('i',4): GDT_Int32,
('I',4): GDT_UInt32,
('l',4): GDT_Int32,
# ('l',8): GDT_Int64,
# ('q',8): GDT_Int64,
# ('Q',8): GDT_UInt64,
('f', 4): GDT_Float32,
('d', 8): GDT_Float64
}
key = (buffer.typecode, buffer.itemsize)
if key not in map_typecode_itemsize_to_gdal:
raise Exception("unhandled type for buffer of type array")
buffer_datatype = ExtendedDataType.Create(map_typecode_itemsize_to_gdal[key])
# Special case for a list of numeric values and 1D arrays
elif dimCount == 1 and type(buffer) == type([]) and len(buffer) != 0 \
and self.GetDataType().GetClass() != GEDTC_STRING:
buffer_datatype = GDT_Int32
for v in buffer:
if isinstance(v, int):
if v >= (1 << 31) or v < -(1 << 31):
buffer_datatype = GDT_Float64
elif isinstance(v, float):
buffer_datatype = GDT_Float64
else:
raise ValueError('Only lists with integer or float elements are supported')
import array
buffer = array.array('d' if buffer_datatype == GDT_Float64 else 'i', buffer)
buffer_datatype = ExtendedDataType.Create(buffer_datatype)
if not buffer_datatype:
buffer_datatype = self.GetDataType()
is_1d_string = self.GetDataType().GetClass() == GEDTC_STRING and buffer_datatype.GetClass() == GEDTC_STRING and dimCount == 1
if not array_start_idx:
array_start_idx = [0] * dimCount
if not count:
if is_1d_string:
assert type(buffer) == type([])
count = [ len(buffer) ]
else:
count = [ dim.GetSize() for dim in self.GetDimensions() ]
if not array_step:
array_step = [1] * dimCount
if not buffer_stride:
stride = 1
buffer_stride = []
# To compute strides we must proceed from the fastest varying dimension
# (the last one), and then reverse the result
for cnt in reversed(count):
buffer_stride.append(stride)
stride *= cnt
buffer_stride.reverse()
if is_1d_string:
return _gdal.MDArray_WriteStringArray(self, array_start_idx, count, array_step, buffer_datatype, buffer)
return _gdal.MDArray_Write(self, array_start_idx, count, array_step, buffer_stride, buffer_datatype, buffer)
def WriteArray(self, array,
array_start_idx = None,
array_step = None):
from osgeo import gdal_array
return gdal_array.MDArrayWriteArray(self, array, array_start_idx, array_step)
def ReadAsMaskedArray(self,
array_start_idx = None,
count = None,
array_step = None):
""" Return a numpy masked array of ReadAsArray() with GetMask() """
import numpy
mask = self.GetMask()
if mask is not None:
array = self.ReadAsArray(array_start_idx, count, array_step)
mask_array = mask.ReadAsArray(array_start_idx, count, array_step)
bool_array = ~mask_array.astype(bool)
return numpy.ma.array(array, mask=bool_array)
else:
return numpy.ma.array(self.ReadAsArray(array_start_idx, count, array_step), mask=None)
def GetShape(self):
""" Return the shape of the array """
if not self.GetDimensionCount():
return None
shp = ()
for dim in self.GetDimensions():
shp += (dim.GetSize(),)
return shp
shape = property(fget=GetShape, doc='Returns the shape of the array.')
MDArray_swigregister = _gdal.MDArray_swigregister
MDArray_swigregister(MDArray)
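# Illustrative usage sketch, not part of the generated bindings: it exercises the
# numpy-friendly helpers defined on MDArray above (shape, ReadAsArray(),
# ReadAsMaskedArray() and slicing through __getitem__/GetView()). The file name
# 'example.nc' and the 3-dimensional array name 'temperature' are assumptions;
# adjust them to your data.
def _example_mdarray_usage():
    ds = OpenEx('example.nc', gdalconst.OF_MULTIDIM_RASTER)
    rg = ds.GetRootGroup()
    ar = rg.OpenMDArray('temperature')
    full = ar.ReadAsArray()               # whole array as a numpy array
    masked = ar.ReadAsMaskedArray()       # numpy masked array honoring GetMask()
    subset = ar[0, 0:10, :]               # MDArray view built from a slicing expression
    return ar.shape, full, masked, subset.ReadAsArray()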
class Attribute(_object):
"""Proxy of C++ GDALAttributeHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Attribute, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Attribute, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_Attribute
__del__ = lambda self: None
def GetName(self, *args):
"""GetName(Attribute self) -> char const *"""
return _gdal.Attribute_GetName(self, *args)
def GetFullName(self, *args):
"""GetFullName(Attribute self) -> char const *"""
return _gdal.Attribute_GetFullName(self, *args)
def GetTotalElementsCount(self, *args):
"""GetTotalElementsCount(Attribute self) -> unsigned long long"""
return _gdal.Attribute_GetTotalElementsCount(self, *args)
def GetDimensionCount(self, *args):
"""GetDimensionCount(Attribute self) -> size_t"""
return _gdal.Attribute_GetDimensionCount(self, *args)
def GetDimensionsSize(self, *args):
"""GetDimensionsSize(Attribute self)"""
return _gdal.Attribute_GetDimensionsSize(self, *args)
def GetDataType(self, *args):
"""GetDataType(Attribute self) -> ExtendedDataType"""
return _gdal.Attribute_GetDataType(self, *args)
def ReadAsRaw(self, *args):
"""ReadAsRaw(Attribute self) -> CPLErr"""
return _gdal.Attribute_ReadAsRaw(self, *args)
def ReadAsString(self, *args):
"""ReadAsString(Attribute self) -> char const *"""
return _gdal.Attribute_ReadAsString(self, *args)
def ReadAsInt(self, *args):
"""ReadAsInt(Attribute self) -> int"""
return _gdal.Attribute_ReadAsInt(self, *args)
def ReadAsDouble(self, *args):
"""ReadAsDouble(Attribute self) -> double"""
return _gdal.Attribute_ReadAsDouble(self, *args)
def ReadAsStringArray(self, *args):
"""ReadAsStringArray(Attribute self) -> char **"""
return _gdal.Attribute_ReadAsStringArray(self, *args)
def ReadAsIntArray(self, *args):
"""ReadAsIntArray(Attribute self)"""
return _gdal.Attribute_ReadAsIntArray(self, *args)
def ReadAsDoubleArray(self, *args):
"""ReadAsDoubleArray(Attribute self)"""
return _gdal.Attribute_ReadAsDoubleArray(self, *args)
def WriteRaw(self, *args):
"""WriteRaw(Attribute self, GIntBig nLen) -> CPLErr"""
return _gdal.Attribute_WriteRaw(self, *args)
def WriteString(self, *args):
"""WriteString(Attribute self, char const * val) -> CPLErr"""
return _gdal.Attribute_WriteString(self, *args)
def WriteStringArray(self, *args):
"""WriteStringArray(Attribute self, char ** vals) -> CPLErr"""
return _gdal.Attribute_WriteStringArray(self, *args)
def WriteInt(self, *args):
"""WriteInt(Attribute self, int val) -> CPLErr"""
return _gdal.Attribute_WriteInt(self, *args)
def WriteDouble(self, *args):
"""WriteDouble(Attribute self, double val) -> CPLErr"""
return _gdal.Attribute_WriteDouble(self, *args)
def WriteDoubleArray(self, *args):
"""WriteDoubleArray(Attribute self, int nList) -> CPLErr"""
return _gdal.Attribute_WriteDoubleArray(self, *args)
def Read(self):
""" Read an attribute and return it with the most appropriate type """
dt_class = self.GetDataType().GetClass()
if dt_class == GEDTC_STRING:
if self.GetTotalElementsCount() == 1:
return self.ReadAsString()
return self.ReadAsStringArray()
if dt_class == GEDTC_NUMERIC:
if self.GetDataType().GetNumericDataType() in (GDT_Byte, GDT_Int16, GDT_UInt16, GDT_Int32):
if self.GetTotalElementsCount() == 1:
return self.ReadAsInt()
else:
return self.ReadAsIntArray()
else:
if self.GetTotalElementsCount() == 1:
return self.ReadAsDouble()
else:
return self.ReadAsDoubleArray()
return self.ReadAsRaw()
def Write(self, val):
if isinstance(val, (int, type(12345678901234))):
if val >= -0x80000000 and val <= 0x7FFFFFFF:
return self.WriteInt(val)
else:
return self.WriteDouble(val)
if isinstance(val, float):
return self.WriteDouble(val)
if isinstance(val, str) and self.GetDataType().GetClass() != GEDTC_COMPOUND:
return self.WriteString(val)
if isinstance(val, list):
if len(val) == 0:
if self.GetDataType().GetClass() == GEDTC_STRING:
return self.WriteStringArray(val)
else:
return self.WriteDoubleArray(val)
if isinstance(val[0], (int, type(12345678901234), float)):
return self.WriteDoubleArray(val)
if isinstance(val[0], str):
return self.WriteStringArray(val)
return self.WriteRaw(val)
Attribute_swigregister = _gdal.Attribute_swigregister
Attribute_swigregister(Attribute)
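# Illustrative sketch, not part of the generated bindings: the type dispatch
# performed by Attribute.Read() and Attribute.Write() above. The attribute name
# 'units' is an assumption; 'obj' is any MDArray or Group opened in update mode.
def _example_attribute_usage(obj):
    att = obj.GetAttribute('units')
    if att is None:
        return None
    value = att.Read()        # str, int/float or a list, depending on the data type
    att.Write('metre')        # a str value is dispatched to WriteString()
    return value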
class Dimension(_object):
"""Proxy of C++ GDALDimensionHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Dimension, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Dimension, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_Dimension
__del__ = lambda self: None
def GetName(self, *args):
"""GetName(Dimension self) -> char const *"""
return _gdal.Dimension_GetName(self, *args)
def GetFullName(self, *args):
"""GetFullName(Dimension self) -> char const *"""
return _gdal.Dimension_GetFullName(self, *args)
def GetType(self, *args):
"""GetType(Dimension self) -> char const *"""
return _gdal.Dimension_GetType(self, *args)
def GetDirection(self, *args):
"""GetDirection(Dimension self) -> char const *"""
return _gdal.Dimension_GetDirection(self, *args)
def GetSize(self, *args):
"""GetSize(Dimension self) -> unsigned long long"""
return _gdal.Dimension_GetSize(self, *args)
def GetIndexingVariable(self, *args):
"""GetIndexingVariable(Dimension self) -> MDArray"""
return _gdal.Dimension_GetIndexingVariable(self, *args)
def SetIndexingVariable(self, *args):
"""SetIndexingVariable(Dimension self, MDArray array) -> bool"""
return _gdal.Dimension_SetIndexingVariable(self, *args)
Dimension_swigregister = _gdal.Dimension_swigregister
Dimension_swigregister(Dimension)
GEDTC_NUMERIC = _gdal.GEDTC_NUMERIC
GEDTC_STRING = _gdal.GEDTC_STRING
GEDTC_COMPOUND = _gdal.GEDTC_COMPOUND
class ExtendedDataType(_object):
"""Proxy of C++ GDALExtendedDataTypeHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, ExtendedDataType, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, ExtendedDataType, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_ExtendedDataType
__del__ = lambda self: None
def Create(*args):
"""Create(GDALDataType dt) -> ExtendedDataType"""
return _gdal.ExtendedDataType_Create(*args)
Create = staticmethod(Create)
def CreateString(*args):
"""CreateString(size_t nMaxStringLength=0) -> ExtendedDataType"""
return _gdal.ExtendedDataType_CreateString(*args)
CreateString = staticmethod(CreateString)
def CreateCompound(*args):
"""CreateCompound(char const * name, size_t nTotalSize, int nComps) -> ExtendedDataType"""
return _gdal.ExtendedDataType_CreateCompound(*args)
CreateCompound = staticmethod(CreateCompound)
def GetName(self, *args):
"""GetName(ExtendedDataType self) -> char const *"""
return _gdal.ExtendedDataType_GetName(self, *args)
def GetClass(self, *args):
"""GetClass(ExtendedDataType self) -> GDALExtendedDataTypeClass"""
return _gdal.ExtendedDataType_GetClass(self, *args)
def GetNumericDataType(self, *args):
"""GetNumericDataType(ExtendedDataType self) -> GDALDataType"""
return _gdal.ExtendedDataType_GetNumericDataType(self, *args)
def GetSize(self, *args):
"""GetSize(ExtendedDataType self) -> size_t"""
return _gdal.ExtendedDataType_GetSize(self, *args)
def GetMaxStringLength(self, *args):
"""GetMaxStringLength(ExtendedDataType self) -> size_t"""
return _gdal.ExtendedDataType_GetMaxStringLength(self, *args)
def GetComponents(self, *args):
"""GetComponents(ExtendedDataType self)"""
return _gdal.ExtendedDataType_GetComponents(self, *args)
def CanConvertTo(self, *args):
"""CanConvertTo(ExtendedDataType self, ExtendedDataType other) -> bool"""
return _gdal.ExtendedDataType_CanConvertTo(self, *args)
def Equals(self, *args):
"""Equals(ExtendedDataType self, ExtendedDataType other) -> bool"""
return _gdal.ExtendedDataType_Equals(self, *args)
def __eq__(self, other):
return self.Equals(other)
def __ne__(self, other):
return not self.__eq__(other)
ExtendedDataType_swigregister = _gdal.ExtendedDataType_swigregister
ExtendedDataType_swigregister(ExtendedDataType)
def ExtendedDataType_Create(*args):
"""ExtendedDataType_Create(GDALDataType dt) -> ExtendedDataType"""
return _gdal.ExtendedDataType_Create(*args)
def ExtendedDataType_CreateString(*args):
"""ExtendedDataType_CreateString(size_t nMaxStringLength=0) -> ExtendedDataType"""
return _gdal.ExtendedDataType_CreateString(*args)
def ExtendedDataType_CreateCompound(*args):
"""ExtendedDataType_CreateCompound(char const * name, size_t nTotalSize, int nComps) -> ExtendedDataType"""
return _gdal.ExtendedDataType_CreateCompound(*args)
class EDTComponent(_object):
"""Proxy of C++ GDALEDTComponentHS class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, EDTComponent, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, EDTComponent, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_EDTComponent
__del__ = lambda self: None
def Create(*args):
"""Create(char const * name, size_t offset, ExtendedDataType type) -> EDTComponent"""
return _gdal.EDTComponent_Create(*args)
Create = staticmethod(Create)
def GetName(self, *args):
"""GetName(EDTComponent self) -> char const *"""
return _gdal.EDTComponent_GetName(self, *args)
def GetOffset(self, *args):
"""GetOffset(EDTComponent self) -> size_t"""
return _gdal.EDTComponent_GetOffset(self, *args)
def GetType(self, *args):
"""GetType(EDTComponent self) -> ExtendedDataType"""
return _gdal.EDTComponent_GetType(self, *args)
EDTComponent_swigregister = _gdal.EDTComponent_swigregister
EDTComponent_swigregister(EDTComponent)
def EDTComponent_Create(*args):
"""EDTComponent_Create(char const * name, size_t offset, ExtendedDataType type) -> EDTComponent"""
return _gdal.EDTComponent_Create(*args)
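# Illustrative sketch, not part of the generated bindings: creating extended data
# types with the wrappers above, including a compound type made of two naturally
# aligned components (Int32 at offset 0, Float64 at offset 8, 16 bytes in total).
def _example_extended_datatype():
    dt_int = ExtendedDataType.Create(GDT_Int32)
    dt_str = ExtendedDataType.CreateString()
    comp = ExtendedDataType.CreateCompound(
        'pair', 16,
        [EDTComponent.Create('i', 0, ExtendedDataType.Create(GDT_Int32)),
         EDTComponent.Create('d', 8, ExtendedDataType.Create(GDT_Float64))])
    return dt_int == dt_str, comp.GetSize(), [c.GetName() for c in comp.GetComponents()]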
class Band(MajorObject):
"""Proxy of C++ GDALRasterBandShadow class."""
__swig_setmethods__ = {}
for _s in [MajorObject]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Band, name, value)
__swig_getmethods__ = {}
for _s in [MajorObject]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Band, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_getmethods__["XSize"] = _gdal.Band_XSize_get
if _newclass:
XSize = _swig_property(_gdal.Band_XSize_get)
__swig_getmethods__["YSize"] = _gdal.Band_YSize_get
if _newclass:
YSize = _swig_property(_gdal.Band_YSize_get)
__swig_getmethods__["DataType"] = _gdal.Band_DataType_get
if _newclass:
DataType = _swig_property(_gdal.Band_DataType_get)
def GetDataset(self, *args):
"""GetDataset(Band self) -> Dataset"""
return _gdal.Band_GetDataset(self, *args)
def GetBand(self, *args):
"""GetBand(Band self) -> int"""
return _gdal.Band_GetBand(self, *args)
def GetBlockSize(self, *args):
"""GetBlockSize(Band self)"""
return _gdal.Band_GetBlockSize(self, *args)
def GetActualBlockSize(self, *args):
"""GetActualBlockSize(Band self, int nXBlockOff, int nYBlockOff)"""
return _gdal.Band_GetActualBlockSize(self, *args)
def GetColorInterpretation(self, *args):
"""GetColorInterpretation(Band self) -> GDALColorInterp"""
return _gdal.Band_GetColorInterpretation(self, *args)
def GetRasterColorInterpretation(self, *args):
"""GetRasterColorInterpretation(Band self) -> GDALColorInterp"""
return _gdal.Band_GetRasterColorInterpretation(self, *args)
def SetColorInterpretation(self, *args):
"""SetColorInterpretation(Band self, GDALColorInterp val) -> CPLErr"""
return _gdal.Band_SetColorInterpretation(self, *args)
def SetRasterColorInterpretation(self, *args):
"""SetRasterColorInterpretation(Band self, GDALColorInterp val) -> CPLErr"""
return _gdal.Band_SetRasterColorInterpretation(self, *args)
def GetNoDataValue(self, *args):
"""GetNoDataValue(Band self)"""
return _gdal.Band_GetNoDataValue(self, *args)
def SetNoDataValue(self, *args):
"""SetNoDataValue(Band self, double d) -> CPLErr"""
return _gdal.Band_SetNoDataValue(self, *args)
def DeleteNoDataValue(self, *args):
"""DeleteNoDataValue(Band self) -> CPLErr"""
return _gdal.Band_DeleteNoDataValue(self, *args)
def GetUnitType(self, *args):
"""GetUnitType(Band self) -> char const *"""
return _gdal.Band_GetUnitType(self, *args)
def SetUnitType(self, *args):
"""SetUnitType(Band self, char const * val) -> CPLErr"""
return _gdal.Band_SetUnitType(self, *args)
def GetRasterCategoryNames(self, *args):
"""GetRasterCategoryNames(Band self) -> char **"""
return _gdal.Band_GetRasterCategoryNames(self, *args)
def SetRasterCategoryNames(self, *args):
"""SetRasterCategoryNames(Band self, char ** names) -> CPLErr"""
return _gdal.Band_SetRasterCategoryNames(self, *args)
def GetMinimum(self, *args):
"""GetMinimum(Band self)"""
return _gdal.Band_GetMinimum(self, *args)
def GetMaximum(self, *args):
"""GetMaximum(Band self)"""
return _gdal.Band_GetMaximum(self, *args)
def GetOffset(self, *args):
"""GetOffset(Band self)"""
return _gdal.Band_GetOffset(self, *args)
def GetScale(self, *args):
"""GetScale(Band self)"""
return _gdal.Band_GetScale(self, *args)
def SetOffset(self, *args):
"""SetOffset(Band self, double val) -> CPLErr"""
return _gdal.Band_SetOffset(self, *args)
def SetScale(self, *args):
"""SetScale(Band self, double val) -> CPLErr"""
return _gdal.Band_SetScale(self, *args)
def GetStatistics(self, *args):
"""GetStatistics(Band self, int approx_ok, int force) -> CPLErr"""
return _gdal.Band_GetStatistics(self, *args)
def ComputeStatistics(self, *args):
"""ComputeStatistics(Band self, bool approx_ok, GDALProgressFunc callback=0, void * callback_data=None) -> CPLErr"""
return _gdal.Band_ComputeStatistics(self, *args)
def SetStatistics(self, *args):
"""SetStatistics(Band self, double min, double max, double mean, double stddev) -> CPLErr"""
return _gdal.Band_SetStatistics(self, *args)
def GetOverviewCount(self, *args):
"""GetOverviewCount(Band self) -> int"""
return _gdal.Band_GetOverviewCount(self, *args)
def GetOverview(self, *args):
"""GetOverview(Band self, int i) -> Band"""
return _gdal.Band_GetOverview(self, *args)
def Checksum(self, *args, **kwargs):
"""Checksum(Band self, int xoff=0, int yoff=0, int * xsize=None, int * ysize=None) -> int"""
return _gdal.Band_Checksum(self, *args, **kwargs)
def ComputeRasterMinMax(self, *args):
"""ComputeRasterMinMax(Band self, int approx_ok=0)"""
return _gdal.Band_ComputeRasterMinMax(self, *args)
def ComputeBandStats(self, *args):
"""ComputeBandStats(Band self, int samplestep=1)"""
return _gdal.Band_ComputeBandStats(self, *args)
def Fill(self, *args):
"""Fill(Band self, double real_fill, double imag_fill=0.0) -> CPLErr"""
return _gdal.Band_Fill(self, *args)
def WriteRaster(self, *args, **kwargs):
"""WriteRaster(Band self, int xoff, int yoff, int xsize, int ysize, GIntBig buf_len, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None) -> CPLErr"""
return _gdal.Band_WriteRaster(self, *args, **kwargs)
def FlushCache(self, *args):
"""FlushCache(Band self)"""
return _gdal.Band_FlushCache(self, *args)
def GetRasterColorTable(self, *args):
"""GetRasterColorTable(Band self) -> ColorTable"""
return _gdal.Band_GetRasterColorTable(self, *args)
def GetColorTable(self, *args):
"""GetColorTable(Band self) -> ColorTable"""
return _gdal.Band_GetColorTable(self, *args)
def SetRasterColorTable(self, *args):
"""SetRasterColorTable(Band self, ColorTable arg) -> int"""
return _gdal.Band_SetRasterColorTable(self, *args)
def SetColorTable(self, *args):
"""SetColorTable(Band self, ColorTable arg) -> int"""
return _gdal.Band_SetColorTable(self, *args)
def GetDefaultRAT(self, *args):
"""GetDefaultRAT(Band self) -> RasterAttributeTable"""
return _gdal.Band_GetDefaultRAT(self, *args)
def SetDefaultRAT(self, *args):
"""SetDefaultRAT(Band self, RasterAttributeTable table) -> int"""
return _gdal.Band_SetDefaultRAT(self, *args)
def GetMaskBand(self, *args):
"""GetMaskBand(Band self) -> Band"""
return _gdal.Band_GetMaskBand(self, *args)
def GetMaskFlags(self, *args):
"""GetMaskFlags(Band self) -> int"""
return _gdal.Band_GetMaskFlags(self, *args)
def CreateMaskBand(self, *args):
"""CreateMaskBand(Band self, int nFlags) -> CPLErr"""
return _gdal.Band_CreateMaskBand(self, *args)
def GetHistogram(self, *args, **kwargs):
"""GetHistogram(Band self, double min=-0.5, double max=255.5, int buckets=256, int include_out_of_range=0, int approx_ok=1, GDALProgressFunc callback=0, void * callback_data=None) -> CPLErr"""
return _gdal.Band_GetHistogram(self, *args, **kwargs)
def GetDefaultHistogram(self, *args, **kwargs):
"""GetDefaultHistogram(Band self, double * min_ret=None, double * max_ret=None, int * buckets_ret=None, GUIntBig ** ppanHistogram=None, int force=1, GDALProgressFunc callback=0, void * callback_data=None) -> CPLErr"""
return _gdal.Band_GetDefaultHistogram(self, *args, **kwargs)
def SetDefaultHistogram(self, *args):
"""SetDefaultHistogram(Band self, double min, double max, int buckets_in) -> CPLErr"""
return _gdal.Band_SetDefaultHistogram(self, *args)
def HasArbitraryOverviews(self, *args):
"""HasArbitraryOverviews(Band self) -> bool"""
return _gdal.Band_HasArbitraryOverviews(self, *args)
def GetCategoryNames(self, *args):
"""GetCategoryNames(Band self) -> char **"""
return _gdal.Band_GetCategoryNames(self, *args)
def SetCategoryNames(self, *args):
"""SetCategoryNames(Band self, char ** papszCategoryNames) -> CPLErr"""
return _gdal.Band_SetCategoryNames(self, *args)
def GetVirtualMem(self, *args, **kwargs):
"""GetVirtualMem(Band self, GDALRWFlag eRWFlag, int nXOff, int nYOff, int nXSize, int nYSize, int nBufXSize, int nBufYSize, GDALDataType eBufType, size_t nCacheSize, size_t nPageSizeHint, char ** options=None) -> VirtualMem"""
return _gdal.Band_GetVirtualMem(self, *args, **kwargs)
def GetVirtualMemAuto(self, *args, **kwargs):
"""GetVirtualMemAuto(Band self, GDALRWFlag eRWFlag, char ** options=None) -> VirtualMem"""
return _gdal.Band_GetVirtualMemAuto(self, *args, **kwargs)
def GetTiledVirtualMem(self, *args, **kwargs):
"""GetTiledVirtualMem(Band self, GDALRWFlag eRWFlag, int nXOff, int nYOff, int nXSize, int nYSize, int nTileXSize, int nTileYSize, GDALDataType eBufType, size_t nCacheSize, char ** options=None) -> VirtualMem"""
return _gdal.Band_GetTiledVirtualMem(self, *args, **kwargs)
def GetDataCoverageStatus(self, *args):
"""GetDataCoverageStatus(Band self, int nXOff, int nYOff, int nXSize, int nYSize, int nMaskFlagStop=0) -> int"""
return _gdal.Band_GetDataCoverageStatus(self, *args)
def AdviseRead(self, *args):
"""AdviseRead(Band self, int xoff, int yoff, int xsize, int ysize, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, char ** options=None) -> CPLErr"""
return _gdal.Band_AdviseRead(self, *args)
def AsMDArray(self, *args):
"""AsMDArray(Band self) -> MDArray"""
return _gdal.Band_AsMDArray(self, *args)
def ReadRaster1(self, *args, **kwargs):
"""ReadRaster1(Band self, double xoff, double yoff, double xsize, double ysize, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GDALRIOResampleAlg resample_alg, GDALProgressFunc callback=0, void * callback_data=None, void * inputOutputBuf=None) -> CPLErr"""
return _gdal.Band_ReadRaster1(self, *args, **kwargs)
def ReadBlock(self, *args, **kwargs):
"""ReadBlock(Band self, int xoff, int yoff, void * buf_obj=None) -> CPLErr"""
return _gdal.Band_ReadBlock(self, *args, **kwargs)
def ComputeStatistics(self, *args):
"""ComputeStatistics(Band self, bool approx_ok, GDALProgressFunc callback=0, void * callback_data=None) -> CPLErr"""
        # For backward compatibility: newer SWIG versions have stricter typing and
        # really enforce a bool for approx_ok
approx_ok = args[0]
if approx_ok == 0:
approx_ok = False
elif approx_ok == 1:
approx_ok = True
new_args = [approx_ok]
for arg in args[1:]:
new_args.append( arg )
return _gdal.Band_ComputeStatistics(self, *new_args)
def ReadRaster(self, xoff=0, yoff=0, xsize=None, ysize=None,
buf_xsize=None, buf_ysize=None, buf_type=None,
buf_pixel_space=None, buf_line_space=None,
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None,
buf_obj=None):
if xsize is None:
xsize = self.XSize
if ysize is None:
ysize = self.YSize
return _gdal.Band_ReadRaster1(self, xoff, yoff, xsize, ysize,
buf_xsize, buf_ysize, buf_type,
buf_pixel_space, buf_line_space,
resample_alg, callback, callback_data,
buf_obj)
def WriteRaster(self, xoff, yoff, xsize, ysize,
buf_string,
buf_xsize=None, buf_ysize=None, buf_type=None,
buf_pixel_space=None, buf_line_space=None ):
if buf_xsize is None:
buf_xsize = xsize
if buf_ysize is None:
buf_ysize = ysize
# Redirect to numpy-friendly WriteArray() if buf_string is a numpy array
# and other arguments are compatible
if type(buf_string).__name__ == 'ndarray' and \
buf_xsize == xsize and buf_ysize == ysize and buf_type is None and \
buf_pixel_space is None and buf_line_space is None:
return self.WriteArray(buf_string, xoff=xoff, yoff=yoff)
if buf_type is None:
buf_type = self.DataType
return _gdal.Band_WriteRaster(self,
xoff, yoff, xsize, ysize,
buf_string, buf_xsize, buf_ysize, buf_type,
buf_pixel_space, buf_line_space )
def ReadAsArray(self, xoff=0, yoff=0, win_xsize=None, win_ysize=None,
buf_xsize=None, buf_ysize=None, buf_type=None, buf_obj=None,
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None):
""" Reading a chunk of a GDAL band into a numpy array. The optional (buf_xsize,buf_ysize,buf_type)
parameters should generally not be specified if buf_obj is specified. The array is returned"""
from osgeo import gdal_array
return gdal_array.BandReadAsArray(self, xoff, yoff,
win_xsize, win_ysize,
buf_xsize, buf_ysize, buf_type, buf_obj,
resample_alg=resample_alg,
callback=callback,
callback_data=callback_data)
def WriteArray(self, array, xoff=0, yoff=0,
resample_alg=gdalconst.GRIORA_NearestNeighbour,
callback=None,
callback_data=None):
from osgeo import gdal_array
return gdal_array.BandWriteArray(self, array, xoff, yoff,
resample_alg=resample_alg,
callback=callback,
callback_data=callback_data)
def GetVirtualMemArray(self, eAccess=gdalconst.GF_Read, xoff=0, yoff=0,
xsize=None, ysize=None, bufxsize=None, bufysize=None,
datatype=None,
cache_size = 10 * 1024 * 1024, page_size_hint = 0,
options=None):
"""Return a NumPy array for the band, seen as a virtual memory mapping.
An element is accessed with array[y][x].
Any reference to the array must be dropped before the last reference to the
related dataset is also dropped.
"""
from osgeo import gdal_array
if xsize is None:
xsize = self.XSize
if ysize is None:
ysize = self.YSize
if bufxsize is None:
bufxsize = self.XSize
if bufysize is None:
bufysize = self.YSize
if datatype is None:
datatype = self.DataType
if options is None:
virtualmem = self.GetVirtualMem(eAccess, xoff, yoff, xsize, ysize, bufxsize, bufysize, datatype, cache_size, page_size_hint)
else:
virtualmem = self.GetVirtualMem(eAccess, xoff, yoff, xsize, ysize, bufxsize, bufysize, datatype, cache_size, page_size_hint, options)
return gdal_array.VirtualMemGetArray(virtualmem)
def GetVirtualMemAutoArray(self, eAccess=gdalconst.GF_Read, options=None):
"""Return a NumPy array for the band, seen as a virtual memory mapping.
An element is accessed with array[y][x].
Any reference to the array must be dropped before the last reference to the
related dataset is also dropped.
"""
from osgeo import gdal_array
if options is None:
virtualmem = self.GetVirtualMemAuto(eAccess)
else:
virtualmem = self.GetVirtualMemAuto(eAccess,options)
return gdal_array.VirtualMemGetArray( virtualmem )
def GetTiledVirtualMemArray(self, eAccess=gdalconst.GF_Read, xoff=0, yoff=0,
xsize=None, ysize=None, tilexsize=256, tileysize=256,
datatype=None,
cache_size = 10 * 1024 * 1024, options=None):
"""Return a NumPy array for the band, seen as a virtual memory mapping with
a tile organization.
An element is accessed with array[tiley][tilex][y][x].
Any reference to the array must be dropped before the last reference to the
related dataset is also dropped.
"""
from osgeo import gdal_array
if xsize is None:
xsize = self.XSize
if ysize is None:
ysize = self.YSize
if datatype is None:
datatype = self.DataType
if options is None:
virtualmem = self.GetTiledVirtualMem(eAccess,xoff,yoff,xsize,ysize,tilexsize,tileysize,datatype,cache_size)
else:
virtualmem = self.GetTiledVirtualMem(eAccess,xoff,yoff,xsize,ysize,tilexsize,tileysize,datatype,cache_size,options)
return gdal_array.VirtualMemGetArray( virtualmem )
Band_swigregister = _gdal.Band_swigregister
Band_swigregister(Band)
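# Illustrative sketch, not part of the generated bindings: reading and writing a
# band through the numpy helpers defined above. The input file name 'input.tif'
# is an assumption; the copy is created with the in-memory driver.
def _example_band_usage():
    src = Open('input.tif')
    band = src.GetRasterBand(1)
    data = band.ReadAsArray()               # whole band as a numpy array
    stats = band.ComputeStatistics(False)   # [min, max, mean, stddev]
    mem_ds = GetDriverByName('MEM').Create('', src.RasterXSize, src.RasterYSize, 1, band.DataType)
    mem_ds.GetRasterBand(1).WriteArray(data)
    return stats, mem_ds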
class ColorTable(_object):
"""Proxy of C++ GDALColorTableShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, ColorTable, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, ColorTable, name)
__repr__ = _swig_repr
def __init__(self, *args, **kwargs):
"""__init__(GDALColorTableShadow self, GDALPaletteInterp palette) -> ColorTable"""
this = _gdal.new_ColorTable(*args, **kwargs)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_ColorTable
__del__ = lambda self: None
def Clone(self, *args):
"""Clone(ColorTable self) -> ColorTable"""
return _gdal.ColorTable_Clone(self, *args)
def GetPaletteInterpretation(self, *args):
"""GetPaletteInterpretation(ColorTable self) -> GDALPaletteInterp"""
return _gdal.ColorTable_GetPaletteInterpretation(self, *args)
def GetCount(self, *args):
"""GetCount(ColorTable self) -> int"""
return _gdal.ColorTable_GetCount(self, *args)
def GetColorEntry(self, *args):
"""GetColorEntry(ColorTable self, int entry) -> ColorEntry"""
return _gdal.ColorTable_GetColorEntry(self, *args)
def GetColorEntryAsRGB(self, *args):
"""GetColorEntryAsRGB(ColorTable self, int entry, ColorEntry centry) -> int"""
return _gdal.ColorTable_GetColorEntryAsRGB(self, *args)
def SetColorEntry(self, *args):
"""SetColorEntry(ColorTable self, int entry, ColorEntry centry)"""
return _gdal.ColorTable_SetColorEntry(self, *args)
def CreateColorRamp(self, *args):
"""CreateColorRamp(ColorTable self, int nStartIndex, ColorEntry startcolor, int nEndIndex, ColorEntry endcolor)"""
return _gdal.ColorTable_CreateColorRamp(self, *args)
ColorTable_swigregister = _gdal.ColorTable_swigregister
ColorTable_swigregister(ColorTable)
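# Illustrative sketch, not part of the generated bindings: building a grayscale
# palette with the ColorTable wrapper above, using (R, G, B, A) tuples.
def _example_color_table():
    ct = ColorTable()
    ct.CreateColorRamp(0, (0, 0, 0, 255), 255, (255, 255, 255, 255))
    return ct.GetCount(), ct.GetColorEntry(128)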
class RasterAttributeTable(_object):
"""Proxy of C++ GDALRasterAttributeTableShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, RasterAttributeTable, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, RasterAttributeTable, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALRasterAttributeTableShadow self) -> RasterAttributeTable"""
this = _gdal.new_RasterAttributeTable(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_RasterAttributeTable
__del__ = lambda self: None
def Clone(self, *args):
"""Clone(RasterAttributeTable self) -> RasterAttributeTable"""
return _gdal.RasterAttributeTable_Clone(self, *args)
def GetColumnCount(self, *args):
"""GetColumnCount(RasterAttributeTable self) -> int"""
return _gdal.RasterAttributeTable_GetColumnCount(self, *args)
def GetNameOfCol(self, *args):
"""GetNameOfCol(RasterAttributeTable self, int iCol) -> char const *"""
return _gdal.RasterAttributeTable_GetNameOfCol(self, *args)
def GetUsageOfCol(self, *args):
"""GetUsageOfCol(RasterAttributeTable self, int iCol) -> GDALRATFieldUsage"""
return _gdal.RasterAttributeTable_GetUsageOfCol(self, *args)
def GetTypeOfCol(self, *args):
"""GetTypeOfCol(RasterAttributeTable self, int iCol) -> GDALRATFieldType"""
return _gdal.RasterAttributeTable_GetTypeOfCol(self, *args)
def GetColOfUsage(self, *args):
"""GetColOfUsage(RasterAttributeTable self, GDALRATFieldUsage eUsage) -> int"""
return _gdal.RasterAttributeTable_GetColOfUsage(self, *args)
def GetRowCount(self, *args):
"""GetRowCount(RasterAttributeTable self) -> int"""
return _gdal.RasterAttributeTable_GetRowCount(self, *args)
def GetValueAsString(self, *args):
"""GetValueAsString(RasterAttributeTable self, int iRow, int iCol) -> char const *"""
return _gdal.RasterAttributeTable_GetValueAsString(self, *args)
def GetValueAsInt(self, *args):
"""GetValueAsInt(RasterAttributeTable self, int iRow, int iCol) -> int"""
return _gdal.RasterAttributeTable_GetValueAsInt(self, *args)
def GetValueAsDouble(self, *args):
"""GetValueAsDouble(RasterAttributeTable self, int iRow, int iCol) -> double"""
return _gdal.RasterAttributeTable_GetValueAsDouble(self, *args)
def SetValueAsString(self, *args):
"""SetValueAsString(RasterAttributeTable self, int iRow, int iCol, char const * pszValue)"""
return _gdal.RasterAttributeTable_SetValueAsString(self, *args)
def SetValueAsInt(self, *args):
"""SetValueAsInt(RasterAttributeTable self, int iRow, int iCol, int nValue)"""
return _gdal.RasterAttributeTable_SetValueAsInt(self, *args)
def SetValueAsDouble(self, *args):
"""SetValueAsDouble(RasterAttributeTable self, int iRow, int iCol, double dfValue)"""
return _gdal.RasterAttributeTable_SetValueAsDouble(self, *args)
def SetRowCount(self, *args):
"""SetRowCount(RasterAttributeTable self, int nCount)"""
return _gdal.RasterAttributeTable_SetRowCount(self, *args)
def CreateColumn(self, *args):
"""CreateColumn(RasterAttributeTable self, char const * pszName, GDALRATFieldType eType, GDALRATFieldUsage eUsage) -> int"""
return _gdal.RasterAttributeTable_CreateColumn(self, *args)
def GetLinearBinning(self, *args):
"""GetLinearBinning(RasterAttributeTable self) -> bool"""
return _gdal.RasterAttributeTable_GetLinearBinning(self, *args)
def SetLinearBinning(self, *args):
"""SetLinearBinning(RasterAttributeTable self, double dfRow0Min, double dfBinSize) -> int"""
return _gdal.RasterAttributeTable_SetLinearBinning(self, *args)
def GetRowOfValue(self, *args):
"""GetRowOfValue(RasterAttributeTable self, double dfValue) -> int"""
return _gdal.RasterAttributeTable_GetRowOfValue(self, *args)
def ChangesAreWrittenToFile(self, *args):
"""ChangesAreWrittenToFile(RasterAttributeTable self) -> int"""
return _gdal.RasterAttributeTable_ChangesAreWrittenToFile(self, *args)
def DumpReadable(self, *args):
"""DumpReadable(RasterAttributeTable self)"""
return _gdal.RasterAttributeTable_DumpReadable(self, *args)
def SetTableType(self, *args):
"""SetTableType(RasterAttributeTable self, GDALRATTableType eTableType)"""
return _gdal.RasterAttributeTable_SetTableType(self, *args)
def GetTableType(self, *args):
"""GetTableType(RasterAttributeTable self) -> GDALRATTableType"""
return _gdal.RasterAttributeTable_GetTableType(self, *args)
def WriteArray(self, array, field, start=0):
from osgeo import gdal_array
return gdal_array.RATWriteArray(self, array, field, start)
def ReadAsArray(self, field, start=0, length=None):
from osgeo import gdal_array
return gdal_array.RATReadArray(self, field, start, length)
RasterAttributeTable_swigregister = _gdal.RasterAttributeTable_swigregister
RasterAttributeTable_swigregister(RasterAttributeTable)
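# Illustrative sketch, not part of the generated bindings: filling a column of a
# RasterAttributeTable from a numpy array with the helpers above. The column
# name and values are arbitrary.
def _example_rat_usage():
    import numpy
    rat = RasterAttributeTable()
    rat.CreateColumn('Value', gdalconst.GFT_Integer, gdalconst.GFU_MinMax)
    rat.SetRowCount(3)
    rat.WriteArray(numpy.array([1, 2, 3], dtype=numpy.int32), 0)   # field index 0
    return rat.ReadAsArray(0)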
def TermProgress_nocb(*args, **kwargs):
"""TermProgress_nocb(double dfProgress, char const * pszMessage=None, void * pData=None) -> int"""
return _gdal.TermProgress_nocb(*args, **kwargs)
TermProgress = _gdal.TermProgress
def ComputeMedianCutPCT(*args, **kwargs):
"""ComputeMedianCutPCT(Band red, Band green, Band blue, int num_colors, ColorTable colors, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.ComputeMedianCutPCT(*args, **kwargs)
def DitherRGB2PCT(*args, **kwargs):
"""DitherRGB2PCT(Band red, Band green, Band blue, Band target, ColorTable colors, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.DitherRGB2PCT(*args, **kwargs)
def ReprojectImage(*args, **kwargs):
"""ReprojectImage(Dataset src_ds, Dataset dst_ds, char const * src_wkt=None, char const * dst_wkt=None, GDALResampleAlg eResampleAlg, double WarpMemoryLimit=0.0, double maxerror=0.0, GDALProgressFunc callback=0, void * callback_data=None, char ** options=None) -> CPLErr"""
return _gdal.ReprojectImage(*args, **kwargs)
def ComputeProximity(*args, **kwargs):
"""ComputeProximity(Band srcBand, Band proximityBand, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.ComputeProximity(*args, **kwargs)
def RasterizeLayer(*args, **kwargs):
"""RasterizeLayer(Dataset dataset, int bands, Layer layer, void * pfnTransformer=None, void * pTransformArg=None, int burn_values=0, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.RasterizeLayer(*args, **kwargs)
def Polygonize(*args, **kwargs):
"""Polygonize(Band srcBand, Band maskBand, Layer outLayer, int iPixValField, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.Polygonize(*args, **kwargs)
def FPolygonize(*args, **kwargs):
"""FPolygonize(Band srcBand, Band maskBand, Layer outLayer, int iPixValField, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.FPolygonize(*args, **kwargs)
def FillNodata(*args, **kwargs):
"""FillNodata(Band targetBand, Band maskBand, double maxSearchDist, int smoothingIterations, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.FillNodata(*args, **kwargs)
def SieveFilter(*args, **kwargs):
"""SieveFilter(Band srcBand, Band maskBand, Band dstBand, int threshold, int connectedness=4, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.SieveFilter(*args, **kwargs)
def RegenerateOverviews(*args, **kwargs):
"""RegenerateOverviews(Band srcBand, int overviewBandCount, char const * resampling, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.RegenerateOverviews(*args, **kwargs)
def RegenerateOverview(*args, **kwargs):
"""RegenerateOverview(Band srcBand, Band overviewBand, char const * resampling, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.RegenerateOverview(*args, **kwargs)
def ContourGenerate(*args, **kwargs):
"""ContourGenerate(Band srcBand, double contourInterval, double contourBase, int fixedLevelCount, int useNoData, double noDataValue, Layer dstLayer, int idField, int elevField, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.ContourGenerate(*args, **kwargs)
def ContourGenerateEx(*args, **kwargs):
"""ContourGenerateEx(Band srcBand, Layer dstLayer, char ** options=None, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.ContourGenerateEx(*args, **kwargs)
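# Illustrative sketch, not part of the generated bindings: polygonizing a raster
# band into an in-memory OGR layer with Polygonize() above. The input file name
# 'classes.tif' is an assumption and osgeo.ogr must be available.
def _example_polygonize():
    from osgeo import ogr
    src = Open('classes.tif')
    band = src.GetRasterBand(1)
    vec_ds = ogr.GetDriverByName('Memory').CreateDataSource('out')
    layer = vec_ds.CreateLayer('polygons', srs=None)
    layer.CreateField(ogr.FieldDefn('DN', ogr.OFTInteger))
    Polygonize(band, None, layer, 0, [], callback=None)
    return vec_ds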
GVM_Diagonal = _gdal.GVM_Diagonal
GVM_Edge = _gdal.GVM_Edge
GVM_Max = _gdal.GVM_Max
GVM_Min = _gdal.GVM_Min
GVOT_NORMAL = _gdal.GVOT_NORMAL
GVOT_MIN_TARGET_HEIGHT_FROM_DEM = _gdal.GVOT_MIN_TARGET_HEIGHT_FROM_DEM
GVOT_MIN_TARGET_HEIGHT_FROM_GROUND = _gdal.GVOT_MIN_TARGET_HEIGHT_FROM_GROUND
def ViewshedGenerate(*args, **kwargs):
"""ViewshedGenerate(Band srcBand, char const * driverName, char const * targetRasterName, char ** creationOptions, double observerX, double observerY, double observerHeight, double targetHeight, double visibleVal, double invisibleVal, double outOfRangeVal, double noDataVal, double dfCurvCoeff, GDALViewshedMode mode, double maxDistance, GDALProgressFunc callback=0, void * callback_data=None, GDALViewshedOutputType heightMode=GVOT_NORMAL, char ** papszOptions=None) -> Dataset"""
return _gdal.ViewshedGenerate(*args, **kwargs)
def AutoCreateWarpedVRT(*args):
"""AutoCreateWarpedVRT(Dataset src_ds, char const * src_wkt=None, char const * dst_wkt=None, GDALResampleAlg eResampleAlg, double maxerror=0.0) -> Dataset"""
return _gdal.AutoCreateWarpedVRT(*args)
def CreatePansharpenedVRT(*args):
"""CreatePansharpenedVRT(char const * pszXML, Band panchroBand, int nInputSpectralBands) -> Dataset"""
return _gdal.CreatePansharpenedVRT(*args)
class GDALTransformerInfoShadow(_object):
"""Proxy of C++ GDALTransformerInfoShadow class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALTransformerInfoShadow, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALTransformerInfoShadow, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined")
__repr__ = _swig_repr
__swig_destroy__ = _gdal.delete_GDALTransformerInfoShadow
__del__ = lambda self: None
def TransformPoint(self, *args):
"""
TransformPoint(GDALTransformerInfoShadow self, int bDstToSrc, double [3] inout) -> int
TransformPoint(GDALTransformerInfoShadow self, int bDstToSrc, double x, double y, double z=0.0) -> int
"""
return _gdal.GDALTransformerInfoShadow_TransformPoint(self, *args)
def TransformPoints(self, *args):
"""TransformPoints(GDALTransformerInfoShadow self, int bDstToSrc, int nCount) -> int"""
return _gdal.GDALTransformerInfoShadow_TransformPoints(self, *args)
def TransformGeolocations(self, *args, **kwargs):
"""TransformGeolocations(GDALTransformerInfoShadow self, Band xBand, Band yBand, Band zBand, GDALProgressFunc callback=0, void * callback_data=None, char ** options=None) -> int"""
return _gdal.GDALTransformerInfoShadow_TransformGeolocations(self, *args, **kwargs)
GDALTransformerInfoShadow_swigregister = _gdal.GDALTransformerInfoShadow_swigregister
GDALTransformerInfoShadow_swigregister(GDALTransformerInfoShadow)
def Transformer(*args):
"""Transformer(Dataset src, Dataset dst, char ** options) -> GDALTransformerInfoShadow"""
return _gdal.Transformer(*args)
def ApplyVerticalShiftGrid(*args, **kwargs):
"""ApplyVerticalShiftGrid(Dataset src_ds, Dataset grid_ds, bool inverse=False, double srcUnitToMeter=1.0, double dstUnitToMeter=1.0, char ** options=None) -> Dataset"""
return _gdal.ApplyVerticalShiftGrid(*args, **kwargs)
def ApplyGeoTransform(*args):
"""ApplyGeoTransform(double [6] padfGeoTransform, double dfPixel, double dfLine)"""
return _gdal.ApplyGeoTransform(*args)
def InvGeoTransform(*args):
"""InvGeoTransform(double [6] gt_in) -> RETURN_NONE"""
return _gdal.InvGeoTransform(*args)
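# Illustrative sketch, not part of the generated bindings: converting between
# pixel/line and georeferenced coordinates with the two helpers above, using an
# arbitrary sample geotransform.
def _example_geotransform_roundtrip():
    gt = (440720.0, 60.0, 0.0, 3751320.0, 0.0, -60.0)
    geo_x, geo_y = ApplyGeoTransform(gt, 10, 20)           # pixel/line -> geo
    inv_gt = InvGeoTransform(gt)
    pixel, line = ApplyGeoTransform(inv_gt, geo_x, geo_y)  # geo -> pixel/line
    return geo_x, geo_y, pixel, line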
def VersionInfo(*args):
"""VersionInfo(char const * request) -> char const *"""
return _gdal.VersionInfo(*args)
def AllRegister(*args):
"""AllRegister()"""
return _gdal.AllRegister(*args)
def GDALDestroyDriverManager(*args):
"""GDALDestroyDriverManager()"""
return _gdal.GDALDestroyDriverManager(*args)
def GetCacheMax(*args):
"""GetCacheMax() -> GIntBig"""
return _gdal.GetCacheMax(*args)
def GetCacheUsed(*args):
"""GetCacheUsed() -> GIntBig"""
return _gdal.GetCacheUsed(*args)
def SetCacheMax(*args):
"""SetCacheMax(GIntBig nBytes)"""
return _gdal.SetCacheMax(*args)
def GetDataTypeSize(*args):
"""GetDataTypeSize(GDALDataType eDataType) -> int"""
return _gdal.GetDataTypeSize(*args)
def DataTypeIsComplex(*args):
"""DataTypeIsComplex(GDALDataType eDataType) -> int"""
return _gdal.DataTypeIsComplex(*args)
def GetDataTypeName(*args):
"""GetDataTypeName(GDALDataType eDataType) -> char const *"""
return _gdal.GetDataTypeName(*args)
def GetDataTypeByName(*args):
"""GetDataTypeByName(char const * pszDataTypeName) -> GDALDataType"""
return _gdal.GetDataTypeByName(*args)
def GetColorInterpretationName(*args):
"""GetColorInterpretationName(GDALColorInterp eColorInterp) -> char const *"""
return _gdal.GetColorInterpretationName(*args)
def GetPaletteInterpretationName(*args):
"""GetPaletteInterpretationName(GDALPaletteInterp ePaletteInterp) -> char const *"""
return _gdal.GetPaletteInterpretationName(*args)
def DecToDMS(*args):
"""DecToDMS(double arg1, char const * arg2, int arg3=2) -> char const *"""
return _gdal.DecToDMS(*args)
def PackedDMSToDec(*args):
"""PackedDMSToDec(double dfPacked) -> double"""
return _gdal.PackedDMSToDec(*args)
def DecToPackedDMS(*args):
"""DecToPackedDMS(double dfDec) -> double"""
return _gdal.DecToPackedDMS(*args)
def ParseXMLString(*args):
"""ParseXMLString(char * pszXMLString) -> CPLXMLNode *"""
return _gdal.ParseXMLString(*args)
def SerializeXMLTree(*args):
"""SerializeXMLTree(CPLXMLNode * xmlnode) -> retStringAndCPLFree *"""
return _gdal.SerializeXMLTree(*args)
def GetJPEG2000Structure(*args):
"""GetJPEG2000Structure(char const * pszFilename, char ** options=None) -> CPLXMLNode *"""
return _gdal.GetJPEG2000Structure(*args)
def GetJPEG2000StructureAsString(*args):
"""GetJPEG2000StructureAsString(char const * pszFilename, char ** options=None) -> retStringAndCPLFree *"""
return _gdal.GetJPEG2000StructureAsString(*args)
def GetDriverCount(*args):
"""GetDriverCount() -> int"""
return _gdal.GetDriverCount(*args)
def GetDriverByName(*args):
"""GetDriverByName(char const * name) -> Driver"""
return _gdal.GetDriverByName(*args)
def GetDriver(*args):
"""GetDriver(int i) -> Driver"""
return _gdal.GetDriver(*args)
def Open(*args):
"""Open(char const * utf8_path, GDALAccess eAccess) -> Dataset"""
return _gdal.Open(*args)
def OpenEx(*args, **kwargs):
"""OpenEx(char const * utf8_path, unsigned int nOpenFlags=0, char ** allowed_drivers=None, char ** open_options=None, char ** sibling_files=None) -> Dataset"""
return _gdal.OpenEx(*args, **kwargs)
def OpenShared(*args):
"""OpenShared(char const * utf8_path, GDALAccess eAccess) -> Dataset"""
return _gdal.OpenShared(*args)
def IdentifyDriver(*args):
"""IdentifyDriver(char const * utf8_path, char ** papszSiblings=None) -> Driver"""
return _gdal.IdentifyDriver(*args)
def IdentifyDriverEx(*args, **kwargs):
"""IdentifyDriverEx(char const * utf8_path, unsigned int nIdentifyFlags=0, char ** allowed_drivers=None, char ** sibling_files=None) -> Driver"""
return _gdal.IdentifyDriverEx(*args, **kwargs)
def GeneralCmdLineProcessor(*args):
"""GeneralCmdLineProcessor(char ** papszArgv, int nOptions=0) -> char **"""
return _gdal.GeneralCmdLineProcessor(*args)
__version__ = _gdal.VersionInfo("RELEASE_NAME")
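# Illustrative sketch, not part of the generated bindings: the dataset opening
# entry points wrapped above. The file name 'input.tif' is an assumption.
def _example_open_usage():
    ds = Open('input.tif', gdalconst.GA_ReadOnly)
    ds2 = OpenEx('input.tif', gdalconst.OF_RASTER | gdalconst.OF_READONLY,
                 allowed_drivers=['GTiff'])
    drv = IdentifyDriver('input.tif')
    return ds, ds2, drv.ShortName if drv is not None else None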
class GDALInfoOptions(_object):
"""Proxy of C++ GDALInfoOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALInfoOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALInfoOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALInfoOptions self, char ** options) -> GDALInfoOptions"""
this = _gdal.new_GDALInfoOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALInfoOptions
__del__ = lambda self: None
GDALInfoOptions_swigregister = _gdal.GDALInfoOptions_swigregister
GDALInfoOptions_swigregister(GDALInfoOptions)
def InfoInternal(*args):
"""InfoInternal(Dataset hDataset, GDALInfoOptions infoOptions) -> retStringAndCPLFree *"""
return _gdal.InfoInternal(*args)
class GDALMultiDimInfoOptions(_object):
"""Proxy of C++ GDALMultiDimInfoOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALMultiDimInfoOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALMultiDimInfoOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALMultiDimInfoOptions self, char ** options) -> GDALMultiDimInfoOptions"""
this = _gdal.new_GDALMultiDimInfoOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALMultiDimInfoOptions
__del__ = lambda self: None
GDALMultiDimInfoOptions_swigregister = _gdal.GDALMultiDimInfoOptions_swigregister
GDALMultiDimInfoOptions_swigregister(GDALMultiDimInfoOptions)
def MultiDimInfoInternal(*args):
"""MultiDimInfoInternal(Dataset hDataset, GDALMultiDimInfoOptions infoOptions) -> retStringAndCPLFree *"""
return _gdal.MultiDimInfoInternal(*args)
class GDALTranslateOptions(_object):
"""Proxy of C++ GDALTranslateOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALTranslateOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALTranslateOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALTranslateOptions self, char ** options) -> GDALTranslateOptions"""
this = _gdal.new_GDALTranslateOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALTranslateOptions
__del__ = lambda self: None
GDALTranslateOptions_swigregister = _gdal.GDALTranslateOptions_swigregister
GDALTranslateOptions_swigregister(GDALTranslateOptions)
def TranslateInternal(*args):
"""TranslateInternal(char const * dest, Dataset dataset, GDALTranslateOptions translateOptions, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.TranslateInternal(*args)
class GDALWarpAppOptions(_object):
"""Proxy of C++ GDALWarpAppOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALWarpAppOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALWarpAppOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALWarpAppOptions self, char ** options) -> GDALWarpAppOptions"""
this = _gdal.new_GDALWarpAppOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALWarpAppOptions
__del__ = lambda self: None
GDALWarpAppOptions_swigregister = _gdal.GDALWarpAppOptions_swigregister
GDALWarpAppOptions_swigregister(GDALWarpAppOptions)
def wrapper_GDALWarpDestDS(*args):
"""wrapper_GDALWarpDestDS(Dataset dstDS, int object_list_count, GDALWarpAppOptions warpAppOptions, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.wrapper_GDALWarpDestDS(*args)
def wrapper_GDALWarpDestName(*args):
"""wrapper_GDALWarpDestName(char const * dest, int object_list_count, GDALWarpAppOptions warpAppOptions, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.wrapper_GDALWarpDestName(*args)
class GDALVectorTranslateOptions(_object):
"""Proxy of C++ GDALVectorTranslateOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALVectorTranslateOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALVectorTranslateOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALVectorTranslateOptions self, char ** options) -> GDALVectorTranslateOptions"""
this = _gdal.new_GDALVectorTranslateOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALVectorTranslateOptions
__del__ = lambda self: None
GDALVectorTranslateOptions_swigregister = _gdal.GDALVectorTranslateOptions_swigregister
GDALVectorTranslateOptions_swigregister(GDALVectorTranslateOptions)
def wrapper_GDALVectorTranslateDestDS(*args):
"""wrapper_GDALVectorTranslateDestDS(Dataset dstDS, Dataset srcDS, GDALVectorTranslateOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.wrapper_GDALVectorTranslateDestDS(*args)
def wrapper_GDALVectorTranslateDestName(*args):
"""wrapper_GDALVectorTranslateDestName(char const * dest, Dataset srcDS, GDALVectorTranslateOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.wrapper_GDALVectorTranslateDestName(*args)
class GDALDEMProcessingOptions(_object):
"""Proxy of C++ GDALDEMProcessingOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALDEMProcessingOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALDEMProcessingOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALDEMProcessingOptions self, char ** options) -> GDALDEMProcessingOptions"""
this = _gdal.new_GDALDEMProcessingOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALDEMProcessingOptions
__del__ = lambda self: None
GDALDEMProcessingOptions_swigregister = _gdal.GDALDEMProcessingOptions_swigregister
GDALDEMProcessingOptions_swigregister(GDALDEMProcessingOptions)
def DEMProcessingInternal(*args):
"""DEMProcessingInternal(char const * dest, Dataset dataset, char const * pszProcessing, char const * pszColorFilename, GDALDEMProcessingOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.DEMProcessingInternal(*args)
class GDALNearblackOptions(_object):
"""Proxy of C++ GDALNearblackOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALNearblackOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALNearblackOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALNearblackOptions self, char ** options) -> GDALNearblackOptions"""
this = _gdal.new_GDALNearblackOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALNearblackOptions
__del__ = lambda self: None
GDALNearblackOptions_swigregister = _gdal.GDALNearblackOptions_swigregister
GDALNearblackOptions_swigregister(GDALNearblackOptions)
def wrapper_GDALNearblackDestDS(*args):
"""wrapper_GDALNearblackDestDS(Dataset dstDS, Dataset srcDS, GDALNearblackOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.wrapper_GDALNearblackDestDS(*args)
def wrapper_GDALNearblackDestName(*args):
"""wrapper_GDALNearblackDestName(char const * dest, Dataset srcDS, GDALNearblackOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.wrapper_GDALNearblackDestName(*args)
class GDALGridOptions(_object):
"""Proxy of C++ GDALGridOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALGridOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALGridOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALGridOptions self, char ** options) -> GDALGridOptions"""
this = _gdal.new_GDALGridOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALGridOptions
__del__ = lambda self: None
GDALGridOptions_swigregister = _gdal.GDALGridOptions_swigregister
GDALGridOptions_swigregister(GDALGridOptions)
def GridInternal(*args):
"""GridInternal(char const * dest, Dataset dataset, GDALGridOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.GridInternal(*args)
class GDALRasterizeOptions(_object):
"""Proxy of C++ GDALRasterizeOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALRasterizeOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALRasterizeOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALRasterizeOptions self, char ** options) -> GDALRasterizeOptions"""
this = _gdal.new_GDALRasterizeOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALRasterizeOptions
__del__ = lambda self: None
GDALRasterizeOptions_swigregister = _gdal.GDALRasterizeOptions_swigregister
GDALRasterizeOptions_swigregister(GDALRasterizeOptions)
def wrapper_GDALRasterizeDestDS(*args):
"""wrapper_GDALRasterizeDestDS(Dataset dstDS, Dataset srcDS, GDALRasterizeOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> int"""
return _gdal.wrapper_GDALRasterizeDestDS(*args)
def wrapper_GDALRasterizeDestName(*args):
"""wrapper_GDALRasterizeDestName(char const * dest, Dataset srcDS, GDALRasterizeOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.wrapper_GDALRasterizeDestName(*args)
class GDALBuildVRTOptions(_object):
"""Proxy of C++ GDALBuildVRTOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALBuildVRTOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALBuildVRTOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALBuildVRTOptions self, char ** options) -> GDALBuildVRTOptions"""
this = _gdal.new_GDALBuildVRTOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALBuildVRTOptions
__del__ = lambda self: None
GDALBuildVRTOptions_swigregister = _gdal.GDALBuildVRTOptions_swigregister
GDALBuildVRTOptions_swigregister(GDALBuildVRTOptions)
def BuildVRTInternalObjects(*args):
"""BuildVRTInternalObjects(char const * dest, int object_list_count, GDALBuildVRTOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.BuildVRTInternalObjects(*args)
def BuildVRTInternalNames(*args):
"""BuildVRTInternalNames(char const * dest, char ** source_filenames, GDALBuildVRTOptions options, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.BuildVRTInternalNames(*args)
class GDALMultiDimTranslateOptions(_object):
"""Proxy of C++ GDALMultiDimTranslateOptions class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, GDALMultiDimTranslateOptions, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, GDALMultiDimTranslateOptions, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""__init__(GDALMultiDimTranslateOptions self, char ** options) -> GDALMultiDimTranslateOptions"""
this = _gdal.new_GDALMultiDimTranslateOptions(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _gdal.delete_GDALMultiDimTranslateOptions
__del__ = lambda self: None
GDALMultiDimTranslateOptions_swigregister = _gdal.GDALMultiDimTranslateOptions_swigregister
GDALMultiDimTranslateOptions_swigregister(GDALMultiDimTranslateOptions)
def wrapper_GDALMultiDimTranslateDestName(*args):
"""wrapper_GDALMultiDimTranslateDestName(char const * dest, int object_list_count, GDALMultiDimTranslateOptions multiDimTranslateOptions, GDALProgressFunc callback=0, void * callback_data=None) -> Dataset"""
return _gdal.wrapper_GDALMultiDimTranslateDestName(*args)
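# Note: the *Options classes and *Internal / wrapper_* functions above are the
# low-level SWIG entry points; they are normally driven by the higher-level
# convenience helpers defined in osgeo/gdal.py (e.g. DEMProcessing, Nearblack,
# Grid, Rasterize, BuildVRT), which assemble the option lists and forward the
# progress callback, so calling these wrappers directly is uncommon.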
# This file is compatible with both classic and new-style classes.
|
from django.shortcuts import render, HttpResponse
import requests
# Create your views here.
# def ultrasonic(request):
# return render(request, "ultrasonic/ultrasonic.html")
def ultrasonic(request):
    # Check whether the GET request carries a 'value' parameter
if 'value' in request.GET:
value = request.GET['value']
placeLatitude = request.GET['placeLatitude']
placeLength = request.GET['placeLength']
landArea = request.GET['landArea']
        # Check that value is not empty
if value:
            # Build the payload for the POST request to the web service
args = {
'type': 'Ultrasonic',
'value': value,
'placeLatitude': placeLatitude,
'placeLength': placeLength,
'landArea': landArea,
}
response = requests.post('http://backendpython.azurewebsites.net/ultrasonics/', args)
            # Parse the response as JSON
ultrasonic_json = response.json()
    # Make a GET request to the web service
response = requests.get('http://backendpython.azurewebsites.net/ultrasonics/')
    # Parse the response as JSON
ultrasonics = response.json()
    # Render the results in the ultrasonic template
    return render(request, "ultrasonic/ultrasonic.html", {'ultrasonics': ultrasonics})
|
"""
45.把数组排成最小的数.py
时间复杂度:O(n)
空间复杂度:O(n)
"""
# -*- coding:utf-8 -*-
class LargeNumKey(str):
def __lt__(self, other):
        return self + other < other + self  # for "32" < "3" to hold, "323" < "332" must return True
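# Illustrative check (not part of the original solution): sorting with this key
# orders the strings so their concatenation is minimal, e.g.
# sorted(["3", "32", "321"], key=LargeNumKey) == ["321", "32", "3"]  ->  "321323"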
class Solution:
def PrintMinNumber(self, numbers):
# write code here
if not numbers:
return ""
ret = sorted(map(str, numbers), key = LargeNumKey)
return int("".join(ret))
from functools import cmp_to_key
import random
class Solution2:
def PrintMinNumber(self, numbers):
# write code here
if not numbers:
return ""
temp = list(map(str, numbers))
print(temp)
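        # Comparator sketch (illustrative): x should come before y when
        # int(x + y) < int(y + x); e.g. for x="3", y="32", int("332") - int("323") = 9 > 0,
        # so "32" sorts before "3".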
ret = sorted(temp, key = cmp_to_key(lambda x, y: int(x + y) - int(y + x)))
return "".join(ret if ret[0] != "0" else "0")
if __name__ == "__main__":
numbers = [3, 5, 1, 4, 2]
s = Solution2()
ret = s.PrintMinNumber(numbers)
    print(ret)
|
import gzip
import os
from io import BytesIO
from unittest import mock
import pytest
pd = pytest.importorskip("pandas")
dd = pytest.importorskip("dask.dataframe")
from fsspec.compression import compr
from tlz import partition_all, valmap
import dask
import dask.dataframe as dd
from dask.base import compute_as_if_collection
from dask.bytes.core import read_bytes
from dask.bytes.utils import compress
from dask.core import flatten
from dask.dataframe._compat import tm
from dask.dataframe.io.csv import (
_infer_block_size,
auto_blocksize,
block_mask,
pandas_read_text,
text_blocks_to_pandas,
)
from dask.dataframe.utils import assert_eq, has_known_categories
from dask.utils import filetext, filetexts, tmpdir, tmpfile
# List of available compression formats for test_read_csv_compression
compression_fmts = [fmt for fmt in compr] + [None]
def normalize_text(s):
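    # Illustrative example: normalize_text("  name,amount\n  Alice,100\n")
    # returns "name,amount\nAlice,100" (surrounding whitespace stripped per line).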
return "\n".join(map(str.strip, s.strip().split("\n")))
def parse_filename(path):
return os.path.split(path)[1]
csv_text = """
name,amount
Alice,100
Bob,-200
Charlie,300
Dennis,400
Edith,-500
Frank,600
Alice,200
Frank,-200
Bob,600
Alice,400
Frank,200
Alice,300
Edith,600
""".strip()
tsv_text = csv_text.replace(",", "\t")
tsv_text2 = """
name amount
Alice 100
Bob -200
Charlie 300
Dennis 400
Edith -500
Frank 600
Alice 200
Frank -200
Bob 600
Alice 400
Frank 200
Alice 300
Edith 600
""".strip()
timeseries = """
Date,Open,High,Low,Close,Volume,Adj Close
2015-08-28,198.50,199.839996,197.919998,199.240005,143298900,199.240005
2015-08-27,197.020004,199.419998,195.210007,199.160004,266244700,199.160004
2015-08-26,192.080002,194.789993,188.369995,194.679993,328058100,194.679993
2015-08-25,195.429993,195.449997,186.919998,187.229996,353966700,187.229996
2015-08-24,197.630005,197.630005,182.399994,189.550003,478672400,189.550003
2015-08-21,201.729996,203.940002,197.520004,197.630005,328271500,197.630005
2015-08-20,206.509995,208.289993,203.899994,204.009995,185865600,204.009995
2015-08-19,209.089996,210.009995,207.350006,208.279999,167316300,208.279999
2015-08-18,210.259995,210.679993,209.699997,209.929993,70043800,209.929993
""".strip()
csv_files = {
"2014-01-01.csv": (
b"name,amount,id\n" b"Alice,100,1\n" b"Bob,200,2\n" b"Charlie,300,3\n"
),
"2014-01-02.csv": b"name,amount,id\n",
"2014-01-03.csv": (
b"name,amount,id\n" b"Dennis,400,4\n" b"Edith,500,5\n" b"Frank,600,6\n"
),
}
tsv_files = {k: v.replace(b",", b"\t") for (k, v) in csv_files.items()}
fwf_files = {
"2014-01-01.csv": (
b" name amount id\n"
b" Alice 100 1\n"
b" Bob 200 2\n"
b" Charlie 300 3\n"
),
"2014-01-02.csv": b" name amount id\n",
"2014-01-03.csv": (
b" name amount id\n"
b" Dennis 400 4\n"
b" Edith 500 5\n"
b" Frank 600 6\n"
),
}
expected = pd.concat([pd.read_csv(BytesIO(csv_files[k])) for k in sorted(csv_files)])
comment_header = b"""# some header lines
# that may be present
# in a data file
# before any data"""
csv_units_row = b"str, int, int\n"
tsv_units_row = csv_units_row.replace(b",", b"\t")
# Pandas has deprecated read_table
read_table_mark = pytest.mark.filterwarnings("ignore:read_table:FutureWarning")
csv_and_table = pytest.mark.parametrize(
"reader,files",
[
(pd.read_csv, csv_files),
pytest.param(pd.read_table, tsv_files, marks=read_table_mark),
(pd.read_fwf, fwf_files),
],
)
@csv_and_table
def test_pandas_read_text(reader, files):
b = files["2014-01-01.csv"]
df = pandas_read_text(reader, b, b"", {})
assert list(df.columns) == ["name", "amount", "id"]
assert len(df) == 3
assert df.id.sum() == 1 + 2 + 3
@csv_and_table
def test_pandas_read_text_kwargs(reader, files):
b = files["2014-01-01.csv"]
df = pandas_read_text(reader, b, b"", {"usecols": ["name", "id"]})
assert list(df.columns) == ["name", "id"]
@csv_and_table
def test_pandas_read_text_dtype_coercion(reader, files):
b = files["2014-01-01.csv"]
df = pandas_read_text(reader, b, b"", {}, {"amount": "float"})
assert df.amount.dtype == "float"
@csv_and_table
def test_pandas_read_text_with_header(reader, files):
b = files["2014-01-01.csv"]
header, b = b.split(b"\n", 1)
header = header + b"\n"
df = pandas_read_text(reader, b, header, {})
assert list(df.columns) == ["name", "amount", "id"]
assert len(df) == 3
assert df.id.sum() == 1 + 2 + 3
@csv_and_table
def test_text_blocks_to_pandas_simple(reader, files):
blocks = [[files[k]] for k in sorted(files)]
kwargs = {}
head = pandas_read_text(reader, files["2014-01-01.csv"], b"", {})
header = files["2014-01-01.csv"].split(b"\n")[0] + b"\n"
df = text_blocks_to_pandas(reader, blocks, header, head, kwargs)
assert isinstance(df, dd.DataFrame)
assert list(df.columns) == ["name", "amount", "id"]
values = text_blocks_to_pandas(reader, blocks, header, head, kwargs)
assert isinstance(values, dd.DataFrame)
assert hasattr(values, "dask")
assert len(values.dask) == 3
assert_eq(df.amount.sum(), 100 + 200 + 300 + 400 + 500 + 600)
@csv_and_table
def test_text_blocks_to_pandas_kwargs(reader, files):
blocks = [files[k] for k in sorted(files)]
blocks = [[b] for b in blocks]
kwargs = {"usecols": ["name", "id"]}
head = pandas_read_text(reader, files["2014-01-01.csv"], b"", kwargs)
header = files["2014-01-01.csv"].split(b"\n")[0] + b"\n"
df = text_blocks_to_pandas(reader, blocks, header, head, kwargs)
assert list(df.columns) == ["name", "id"]
result = df.compute()
assert (result.columns == df.columns).all()
@csv_and_table
def test_text_blocks_to_pandas_blocked(reader, files):
header = files["2014-01-01.csv"].split(b"\n")[0] + b"\n"
blocks = []
for k in sorted(files):
b = files[k]
lines = b.split(b"\n")
blocks.append([b"\n".join(bs) for bs in partition_all(2, lines)])
df = text_blocks_to_pandas(reader, blocks, header, expected.head(), {})
assert_eq(
df.compute().reset_index(drop=True),
expected.reset_index(drop=True),
check_dtype=False,
)
expected2 = expected[["name", "id"]]
df = text_blocks_to_pandas(
reader, blocks, header, expected2.head(), {"usecols": ["name", "id"]}
)
assert_eq(
df.compute().reset_index(drop=True),
expected2.reset_index(drop=True),
check_dtype=False,
)
@pytest.mark.parametrize(
"dd_read,pd_read,files",
[(dd.read_csv, pd.read_csv, csv_files), (dd.read_table, pd.read_table, tsv_files)],
)
@read_table_mark
def test_skiprows(dd_read, pd_read, files):
files = {name: comment_header + b"\n" + content for name, content in files.items()}
skip = len(comment_header.splitlines())
with filetexts(files, mode="b"):
df = dd_read("2014-01-*.csv", skiprows=skip)
expected_df = pd.concat([pd_read(n, skiprows=skip) for n in sorted(files)])
assert_eq(df, expected_df, check_dtype=False)
@pytest.mark.parametrize(
"dd_read,pd_read,files,units",
[
(dd.read_csv, pd.read_csv, csv_files, csv_units_row),
(dd.read_table, pd.read_table, tsv_files, tsv_units_row),
],
)
@read_table_mark
def test_skiprows_as_list(dd_read, pd_read, files, units):
files = {
name: (comment_header + b"\n" + content.replace(b"\n", b"\n" + units, 1))
for name, content in files.items()
}
skip = [0, 1, 2, 3, 5]
with filetexts(files, mode="b"):
df = dd_read("2014-01-*.csv", skiprows=skip)
expected_df = pd.concat([pd_read(n, skiprows=skip) for n in sorted(files)])
assert_eq(df, expected_df, check_dtype=False)
csv_blocks = [
[b"aa,bb\n1,1.0\n2,2.0", b"10,20\n30,40"],
[b"aa,bb\n1,1.0\n2,2.0", b"10,20\n30,40"],
]
tsv_blocks = [
[b"aa\tbb\n1\t1.0\n2\t2.0", b"10\t20\n30\t40"],
[b"aa\tbb\n1\t1.0\n2\t2.0", b"10\t20\n30\t40"],
]
@pytest.mark.parametrize(
"reader,blocks", [(pd.read_csv, csv_blocks), (pd.read_table, tsv_blocks)]
)
@read_table_mark
def test_enforce_dtypes(reader, blocks):
head = reader(BytesIO(blocks[0][0]), header=0)
header = blocks[0][0].split(b"\n")[0] + b"\n"
dfs = text_blocks_to_pandas(reader, blocks, header, head, {})
dfs = dask.compute(dfs, scheduler="sync")
assert all(df.dtypes.to_dict() == head.dtypes.to_dict() for df in dfs)
@pytest.mark.parametrize(
"reader,blocks", [(pd.read_csv, csv_blocks), (pd.read_table, tsv_blocks)]
)
@read_table_mark
def test_enforce_columns(reader, blocks):
# Replace second header with different column name
blocks = [blocks[0], [blocks[1][0].replace(b"a", b"A"), blocks[1][1]]]
head = reader(BytesIO(blocks[0][0]), header=0)
header = blocks[0][0].split(b"\n")[0] + b"\n"
with pytest.raises(ValueError):
dfs = text_blocks_to_pandas(reader, blocks, header, head, {}, enforce=True)
dask.compute(*dfs, scheduler="sync")
#############################
# read_csv and read_table #
#############################
@pytest.mark.parametrize(
"dd_read,pd_read,text,sep",
[
(dd.read_csv, pd.read_csv, csv_text, ","),
(dd.read_table, pd.read_table, tsv_text, "\t"),
(dd.read_table, pd.read_table, tsv_text2, r"\s+"),
],
)
@read_table_mark
def test_read_csv(dd_read, pd_read, text, sep):
with filetext(text) as fn:
f = dd_read(fn, blocksize=30, lineterminator=os.linesep, sep=sep)
assert list(f.columns) == ["name", "amount"]
# index may be different
result = f.compute(scheduler="sync").reset_index(drop=True)
assert_eq(result, pd_read(fn, sep=sep))
@pytest.mark.parametrize(
"dd_read,pd_read,text,skip",
[
(dd.read_csv, pd.read_csv, csv_text, 7),
(dd.read_table, pd.read_table, tsv_text, [1, 13]),
],
)
@read_table_mark
def test_read_csv_large_skiprows(dd_read, pd_read, text, skip):
names = ["name", "amount"]
with filetext(text) as fn:
actual = dd_read(fn, skiprows=skip, names=names)
assert_eq(actual, pd_read(fn, skiprows=skip, names=names))
@pytest.mark.parametrize(
"dd_read,pd_read,text,skip",
[
(dd.read_csv, pd.read_csv, csv_text, 7),
(dd.read_table, pd.read_table, tsv_text, [1, 12]),
],
)
@read_table_mark
def test_read_csv_skiprows_only_in_first_partition(dd_read, pd_read, text, skip):
names = ["name", "amount"]
with filetext(text) as fn:
with pytest.warns(UserWarning, match="sample=blocksize"):
actual = dd_read(fn, blocksize=200, skiprows=skip, names=names).compute()
assert_eq(actual, pd_read(fn, skiprows=skip, names=names))
with pytest.warns(UserWarning):
# if new sample does not contain all the skiprows, raise error
with pytest.raises(ValueError):
dd_read(fn, blocksize=30, skiprows=skip, names=names)
@pytest.mark.parametrize(
"dd_read,pd_read,files",
[(dd.read_csv, pd.read_csv, csv_files), (dd.read_table, pd.read_table, tsv_files)],
)
@read_table_mark
def test_read_csv_files(dd_read, pd_read, files):
with filetexts(files, mode="b"):
df = dd_read("2014-01-*.csv")
assert_eq(df, expected, check_dtype=False)
fn = "2014-01-01.csv"
df = dd_read(fn)
expected2 = pd_read(BytesIO(files[fn]))
assert_eq(df, expected2, check_dtype=False)
@pytest.mark.parametrize(
"dd_read,pd_read,files",
[(dd.read_csv, pd.read_csv, csv_files), (dd.read_table, pd.read_table, tsv_files)],
)
@read_table_mark
def test_read_csv_files_list(dd_read, pd_read, files):
with filetexts(files, mode="b"):
subset = sorted(files)[:2] # Just first 2
sol = pd.concat([pd_read(BytesIO(files[k])) for k in subset])
res = dd_read(subset)
assert_eq(res, sol, check_dtype=False)
with pytest.raises(ValueError):
dd_read([])
@pytest.mark.parametrize(
"dd_read,files", [(dd.read_csv, csv_files), (dd.read_table, tsv_files)]
)
@read_table_mark
def test_read_csv_include_path_column(dd_read, files):
with filetexts(files, mode="b"):
df = dd_read(
"2014-01-*.csv",
include_path_column=True,
converters={"path": parse_filename},
)
filenames = df.path.compute().unique()
assert "2014-01-01.csv" in filenames
assert "2014-01-02.csv" not in filenames
assert "2014-01-03.csv" in filenames
@pytest.mark.parametrize(
"dd_read,files", [(dd.read_csv, csv_files), (dd.read_table, tsv_files)]
)
@read_table_mark
def test_read_csv_include_path_column_as_str(dd_read, files):
with filetexts(files, mode="b"):
df = dd_read(
"2014-01-*.csv",
include_path_column="filename",
converters={"filename": parse_filename},
)
filenames = df.filename.compute().unique()
assert "2014-01-01.csv" in filenames
assert "2014-01-02.csv" not in filenames
assert "2014-01-03.csv" in filenames
@pytest.mark.parametrize(
"dd_read,files", [(dd.read_csv, csv_files), (dd.read_table, tsv_files)]
)
@read_table_mark
def test_read_csv_include_path_column_with_duplicate_name(dd_read, files):
with filetexts(files, mode="b"):
with pytest.raises(ValueError):
dd_read("2014-01-*.csv", include_path_column="name")
@pytest.mark.parametrize(
"dd_read,files", [(dd.read_csv, csv_files), (dd.read_table, tsv_files)]
)
@read_table_mark
def test_read_csv_include_path_column_is_dtype_category(dd_read, files):
with filetexts(files, mode="b"):
df = dd_read("2014-01-*.csv", include_path_column=True)
assert df.path.dtype == "category"
assert has_known_categories(df.path)
dfs = dd_read("2014-01-*.csv", include_path_column=True)
result = dfs.compute()
assert result.path.dtype == "category"
assert has_known_categories(result.path)
@pytest.mark.parametrize(
"dd_read,files", [(dd.read_csv, csv_files), (dd.read_table, tsv_files)]
)
@read_table_mark
def test_read_csv_include_path_column_with_multiple_partitions_per_file(dd_read, files):
with filetexts(files, mode="b"):
df = dd_read("2014-01-*.csv", blocksize="10B", include_path_column=True)
assert df.npartitions > 3
assert df.path.dtype == "category"
assert has_known_categories(df.path)
dfs = dd_read("2014-01-*.csv", blocksize="10B", include_path_column=True)
result = dfs.compute()
assert result.path.dtype == "category"
assert has_known_categories(result.path)
# After this point, we test just using read_csv, as all functionality
# for both is implemented using the same code.
def test_read_csv_index():
with filetext(csv_text) as fn:
f = dd.read_csv(fn, blocksize=20).set_index("amount")
result = f.compute(scheduler="sync")
assert result.index.name == "amount"
blocks = compute_as_if_collection(
dd.DataFrame, f.dask, f.__dask_keys__(), scheduler="sync"
)
for i, block in enumerate(blocks):
if i < len(f.divisions) - 2:
assert (block.index < f.divisions[i + 1]).all()
if i > 0:
assert (block.index >= f.divisions[i]).all()
expected = pd.read_csv(fn).set_index("amount")
assert_eq(result, expected)
def test_read_csv_skiprows_range():
with filetext(csv_text) as fn:
f = dd.read_csv(fn, skiprows=range(5))
result = f
expected = pd.read_csv(fn, skiprows=range(5))
assert_eq(result, expected)
def test_usecols():
with filetext(timeseries) as fn:
df = dd.read_csv(fn, blocksize=30, usecols=["High", "Low"])
df_select = df[["High"]]
expected = pd.read_csv(fn, usecols=["High", "Low"])
expected_select = expected[["High"]]
assert (df.compute().values == expected.values).all()
assert (df_select.compute().values == expected_select.values).all()
def test_string_blocksize():
with filetext(timeseries) as fn:
a = dd.read_csv(fn, blocksize="30B")
b = dd.read_csv(fn, blocksize="30")
assert a.npartitions == b.npartitions
c = dd.read_csv(fn, blocksize="64MiB")
assert c.npartitions == 1
def test_skipinitialspace():
text = normalize_text(
"""
name, amount
Alice,100
Bob,-200
Charlie,300
Dennis,400
Edith,-500
Frank,600
"""
)
with filetext(text) as fn:
df = dd.read_csv(fn, skipinitialspace=True, blocksize=20)
assert "amount" in df.columns
assert df.amount.max().compute() == 600
def test_consistent_dtypes():
text = normalize_text(
"""
name,amount
Alice,100.5
Bob,-200.5
Charlie,300
Dennis,400
Edith,-500
Frank,600
"""
)
with filetext(text) as fn:
df = dd.read_csv(fn, blocksize=30)
assert df.amount.compute().dtype == float
def test_consistent_dtypes_2():
text1 = normalize_text(
"""
name,amount
Alice,100
Bob,-200
Charlie,300
"""
)
text2 = normalize_text(
"""
name,amount
1,400
2,-500
Frank,600
"""
)
with filetexts({"foo.1.csv": text1, "foo.2.csv": text2}):
df = dd.read_csv("foo.*.csv", blocksize=25)
assert df.name.dtype == object
assert df.name.compute().dtype == object
def test_categorical_dtypes():
text1 = normalize_text(
"""
fruit,count
apple,10
apple,25
pear,100
orange,15
"""
)
text2 = normalize_text(
"""
fruit,count
apple,200
banana,300
orange,400
banana,10
"""
)
with filetexts({"foo.1.csv": text1, "foo.2.csv": text2}):
df = dd.read_csv("foo.*.csv", dtype={"fruit": "category"}, blocksize=25)
assert df.fruit.dtype == "category"
assert not has_known_categories(df.fruit)
res = df.compute()
assert res.fruit.dtype == "category"
assert sorted(res.fruit.cat.categories) == ["apple", "banana", "orange", "pear"]
def test_categorical_known():
text1 = normalize_text(
"""
A,B
a,a
b,b
a,a
"""
)
text2 = normalize_text(
"""
A,B
a,a
b,b
c,c
"""
)
dtype = pd.api.types.CategoricalDtype(["a", "b", "c"], ordered=False)
with filetexts({"foo.1.csv": text1, "foo.2.csv": text2}):
result = dd.read_csv("foo.*.csv", dtype={"A": "category", "B": "category"})
assert result.A.cat.known is False
assert result.B.cat.known is False
expected = pd.DataFrame(
{
"A": pd.Categorical(
["a", "b", "a", "a", "b", "c"], categories=dtype.categories
),
"B": pd.Categorical(
["a", "b", "a", "a", "b", "c"], categories=dtype.categories
),
},
index=[0, 1, 2, 0, 1, 2],
)
assert_eq(result, expected)
# Specify a dtype
result = dd.read_csv("foo.*.csv", dtype={"A": dtype, "B": "category"})
assert result.A.cat.known is True
assert result.B.cat.known is False
tm.assert_index_equal(result.A.cat.categories, dtype.categories)
assert result.A.cat.ordered is False
assert_eq(result, expected)
# ordered
dtype = pd.api.types.CategoricalDtype(["a", "b", "c"], ordered=True)
result = dd.read_csv("foo.*.csv", dtype={"A": dtype, "B": "category"})
expected["A"] = expected["A"].cat.as_ordered()
assert result.A.cat.known is True
assert result.B.cat.known is False
assert result.A.cat.ordered is True
assert_eq(result, expected)
# Specify "unknown" categories
result = dd.read_csv(
"foo.*.csv", dtype=pd.api.types.CategoricalDtype(ordered=False)
)
assert result.A.cat.known is False
result = dd.read_csv("foo.*.csv", dtype="category")
assert result.A.cat.known is False
@pytest.mark.slow
@pytest.mark.parametrize("compression", ["infer", "gzip"])
def test_compression_multiple_files(compression):
with tmpdir() as tdir:
f = gzip.open(os.path.join(tdir, "a.csv.gz"), "wb")
f.write(csv_text.encode())
f.close()
f = gzip.open(os.path.join(tdir, "b.csv.gz"), "wb")
f.write(csv_text.encode())
f.close()
with pytest.warns(UserWarning):
df = dd.read_csv(os.path.join(tdir, "*.csv.gz"), compression=compression)
assert len(df.compute()) == (len(csv_text.split("\n")) - 1) * 2
def test_empty_csv_file():
with filetext("a,b") as fn:
df = dd.read_csv(fn, header=0)
assert len(df.compute()) == 0
assert list(df.columns) == ["a", "b"]
def test_read_csv_no_sample():
with filetexts(csv_files, mode="b") as fn:
df = dd.read_csv(fn, sample=False)
assert list(df.columns) == ["name", "amount", "id"]
def test_read_csv_sensitive_to_enforce():
with filetexts(csv_files, mode="b"):
a = dd.read_csv("2014-01-*.csv", enforce=True)
b = dd.read_csv("2014-01-*.csv", enforce=False)
assert a._name != b._name
@pytest.mark.parametrize("blocksize", [None, 10])
@pytest.mark.parametrize("fmt", compression_fmts)
def test_read_csv_compression(fmt, blocksize):
if fmt and fmt not in compress:
pytest.skip("compress function not provided for %s" % fmt)
suffix = {"gzip": ".gz", "bz2": ".bz2", "zip": ".zip", "xz": ".xz"}.get(fmt, "")
files2 = valmap(compress[fmt], csv_files) if fmt else csv_files
renamed_files = {k + suffix: v for k, v in files2.items()}
with filetexts(renamed_files, mode="b"):
# This test is using `compression="infer"` (the default) for
# read_csv. The paths must have the appropriate extension.
if fmt and blocksize:
with pytest.warns(UserWarning):
df = dd.read_csv("2014-01-*.csv" + suffix, blocksize=blocksize)
else:
df = dd.read_csv("2014-01-*.csv" + suffix, blocksize=blocksize)
assert_eq(
df.compute(scheduler="sync").reset_index(drop=True),
expected.reset_index(drop=True),
check_dtype=False,
)
@pytest.mark.skip
def test_warn_non_seekable_files():
files2 = valmap(compress["gzip"], csv_files)
with filetexts(files2, mode="b"):
with pytest.warns(UserWarning) as w:
df = dd.read_csv("2014-01-*.csv", compression="gzip")
assert df.npartitions == 3
assert len(w) == 1
msg = str(w[0].message)
assert "gzip" in msg
assert "blocksize=None" in msg
with pytest.warns(None) as w:
df = dd.read_csv("2014-01-*.csv", compression="gzip", blocksize=None)
assert len(w) == 0
with pytest.raises(NotImplementedError):
with pytest.warns(UserWarning): # needed for pytest
df = dd.read_csv("2014-01-*.csv", compression="foo")
def test_windows_line_terminator():
text = "a,b\r\n1,2\r\n2,3\r\n3,4\r\n4,5\r\n5,6\r\n6,7"
with filetext(text) as fn:
df = dd.read_csv(fn, blocksize=5, lineterminator="\r\n")
assert df.b.sum().compute() == 2 + 3 + 4 + 5 + 6 + 7
assert df.a.sum().compute() == 1 + 2 + 3 + 4 + 5 + 6
def test_header_None():
with filetexts({".tmp.1.csv": "1,2", ".tmp.2.csv": "", ".tmp.3.csv": "3,4"}):
df = dd.read_csv(".tmp.*.csv", header=None)
expected = pd.DataFrame({0: [1, 3], 1: [2, 4]})
assert_eq(df.compute().reset_index(drop=True), expected)
def test_auto_blocksize():
assert isinstance(auto_blocksize(3000, 15), int)
assert auto_blocksize(3000, 3) == 100
assert auto_blocksize(5000, 2) == 250
def test__infer_block_size(monkeypatch):
"""
psutil returns a total memory of `None` on some systems
see https://github.com/dask/dask/pull/7601
"""
psutil = pytest.importorskip("psutil")
class MockOutput:
total = None
def mock_virtual_memory():
return MockOutput
monkeypatch.setattr(psutil, "virtual_memory", mock_virtual_memory)
assert _infer_block_size()
def test_auto_blocksize_max64mb():
blocksize = auto_blocksize(1000000000000, 3)
assert blocksize == int(64e6)
assert isinstance(blocksize, int)
def test_auto_blocksize_csv(monkeypatch):
psutil = pytest.importorskip("psutil")
total_memory = psutil.virtual_memory().total
cpu_count = psutil.cpu_count()
mock_read_bytes = mock.Mock(wraps=read_bytes)
monkeypatch.setattr(dask.dataframe.io.csv, "read_bytes", mock_read_bytes)
expected_block_size = auto_blocksize(total_memory, cpu_count)
with filetexts(csv_files, mode="b"):
dd.read_csv("2014-01-01.csv")
assert mock_read_bytes.called
assert mock_read_bytes.call_args[1]["blocksize"] == expected_block_size
def test_head_partial_line_fix():
files = {
".overflow1.csv": (
"a,b\n0,'abcdefghijklmnopqrstuvwxyz'\n1,'abcdefghijklmnopqrstuvwxyz'"
),
".overflow2.csv": "a,b\n111111,-11111\n222222,-22222\n333333,-33333\n",
}
with filetexts(files):
# 64 byte file, 52 characters is mid-quote; this should not cause exception in head-handling code.
dd.read_csv(".overflow1.csv", sample=52)
        # 35 characters cuts off before the second number on the last line
# Should sample to end of line, otherwise pandas will infer `b` to be
# a float dtype
df = dd.read_csv(".overflow2.csv", sample=35)
assert (df.dtypes == "i8").all()
def test_read_csv_raises_on_no_files():
fn = ".not.a.real.file.csv"
try:
dd.read_csv(fn)
assert False
except (OSError, IOError) as e:
assert fn in str(e)
def test_read_csv_has_deterministic_name():
with filetext(csv_text) as fn:
a = dd.read_csv(fn)
b = dd.read_csv(fn)
assert a._name == b._name
assert sorted(a.dask.keys(), key=str) == sorted(b.dask.keys(), key=str)
assert isinstance(a._name, str)
c = dd.read_csv(fn, skiprows=1, na_values=[0])
assert a._name != c._name
def test_multiple_read_csv_has_deterministic_name():
with filetexts({"_foo.1.csv": csv_text, "_foo.2.csv": csv_text}):
a = dd.read_csv("_foo.*.csv")
b = dd.read_csv("_foo.*.csv")
assert sorted(a.dask.keys(), key=str) == sorted(b.dask.keys(), key=str)
def test_read_csv_has_different_names_based_on_blocksize():
with filetext(csv_text) as fn:
a = dd.read_csv(fn, blocksize="10kB")
b = dd.read_csv(fn, blocksize="20kB")
assert a._name != b._name
def test_csv_with_integer_names():
with filetext("alice,1\nbob,2") as fn:
df = dd.read_csv(fn, header=None)
assert list(df.columns) == [0, 1]
def test_late_dtypes():
text = "numbers,names,more_numbers,integers,dates\n"
for i in range(1000):
text += "1,,2,3,2017-10-31 00:00:00\n"
text += "1.5,bar,2.5,3,4998-01-01 00:00:00\n"
date_msg = (
"\n"
"\n"
"-------------------------------------------------------------\n"
"\n"
"The following columns also failed to properly parse as dates:\n"
"\n"
"- dates\n"
"\n"
"This is usually due to an invalid value in that column. To\n"
"diagnose and fix it's recommended to drop these columns from the\n"
"`parse_dates` keyword, and manually convert them to dates later\n"
"using `dd.to_datetime`."
)
with filetext(text) as fn:
sol = pd.read_csv(fn)
msg = (
"Mismatched dtypes found in `pd.read_csv`/`pd.read_table`.\n"
"\n"
"+--------------+---------+----------+\n"
"| Column | Found | Expected |\n"
"+--------------+---------+----------+\n"
"| more_numbers | float64 | int64 |\n"
"| names | object | float64 |\n"
"| numbers | float64 | int64 |\n"
"+--------------+---------+----------+\n"
"\n"
"- names\n"
" ValueError(.*)\n"
"\n"
"Usually this is due to dask's dtype inference failing, and\n"
"*may* be fixed by specifying dtypes manually by adding:\n"
"\n"
"dtype={'more_numbers': 'float64',\n"
" 'names': 'object',\n"
" 'numbers': 'float64'}\n"
"\n"
"to the call to `read_csv`/`read_table`."
)
with pytest.raises(ValueError) as e:
dd.read_csv(fn, sample=50, parse_dates=["dates"]).compute(scheduler="sync")
assert e.match(msg + date_msg)
with pytest.raises(ValueError) as e:
dd.read_csv(fn, sample=50).compute(scheduler="sync")
assert e.match(msg)
msg = (
"Mismatched dtypes found in `pd.read_csv`/`pd.read_table`.\n"
"\n"
"+--------------+---------+----------+\n"
"| Column | Found | Expected |\n"
"+--------------+---------+----------+\n"
"| more_numbers | float64 | int64 |\n"
"| numbers | float64 | int64 |\n"
"+--------------+---------+----------+\n"
"\n"
"Usually this is due to dask's dtype inference failing, and\n"
"*may* be fixed by specifying dtypes manually by adding:\n"
"\n"
"dtype={'more_numbers': 'float64',\n"
" 'numbers': 'float64'}\n"
"\n"
"to the call to `read_csv`/`read_table`.\n"
"\n"
"Alternatively, provide `assume_missing=True` to interpret\n"
"all unspecified integer columns as floats."
)
with pytest.raises(ValueError) as e:
dd.read_csv(fn, sample=50, dtype={"names": "O"}).compute(scheduler="sync")
assert str(e.value) == msg
with pytest.raises(ValueError) as e:
dd.read_csv(
fn, sample=50, parse_dates=["dates"], dtype={"names": "O"}
).compute(scheduler="sync")
assert str(e.value) == msg + date_msg
msg = (
"Mismatched dtypes found in `pd.read_csv`/`pd.read_table`.\n"
"\n"
"The following columns failed to properly parse as dates:\n"
"\n"
"- dates\n"
"\n"
"This is usually due to an invalid value in that column. To\n"
"diagnose and fix it's recommended to drop these columns from the\n"
"`parse_dates` keyword, and manually convert them to dates later\n"
"using `dd.to_datetime`."
)
with pytest.raises(ValueError) as e:
dd.read_csv(
fn,
sample=50,
parse_dates=["dates"],
dtype={"more_numbers": float, "names": object, "numbers": float},
).compute(scheduler="sync")
assert str(e.value) == msg
# Specifying dtypes works
res = dd.read_csv(
fn,
sample=50,
dtype={"more_numbers": float, "names": object, "numbers": float},
)
assert_eq(res, sol)
def test_assume_missing():
text = "numbers,names,more_numbers,integers\n"
for i in range(1000):
text += "1,foo,2,3\n"
text += "1.5,bar,2.5,3\n"
with filetext(text) as fn:
sol = pd.read_csv(fn)
# assume_missing affects all columns
res = dd.read_csv(fn, sample=50, assume_missing=True)
assert_eq(res, sol.astype({"integers": float}))
# assume_missing doesn't override specified dtypes
res = dd.read_csv(
fn, sample=50, assume_missing=True, dtype={"integers": "int64"}
)
assert_eq(res, sol)
# assume_missing works with dtype=None
res = dd.read_csv(fn, sample=50, assume_missing=True, dtype=None)
assert_eq(res, sol.astype({"integers": float}))
text = "numbers,integers\n"
for i in range(1000):
text += "1,2\n"
text += "1.5,2\n"
with filetext(text) as fn:
sol = pd.read_csv(fn)
        # assume_missing ignored when all dtypes specified
df = dd.read_csv(fn, sample=30, dtype="int64", assume_missing=True)
assert df.numbers.dtype == "int64"
def test_index_col():
with filetext(csv_text) as fn:
try:
dd.read_csv(fn, blocksize=30, index_col="name")
assert False
except ValueError as e:
assert "set_index" in str(e)
def test_read_csv_with_datetime_index_partitions_one():
with filetext(timeseries) as fn:
df = pd.read_csv(
fn, index_col=0, header=0, usecols=[0, 4], parse_dates=["Date"]
)
        # blocksize explicitly set so the file is read as a single chunk
ddf = dd.read_csv(
fn, header=0, usecols=[0, 4], parse_dates=["Date"], blocksize=10000000
).set_index("Date")
assert_eq(df, ddf)
# because fn is so small, by default, this will only be one chunk
ddf = dd.read_csv(fn, header=0, usecols=[0, 4], parse_dates=["Date"]).set_index(
"Date"
)
assert_eq(df, ddf)
def test_read_csv_with_datetime_index_partitions_n():
with filetext(timeseries) as fn:
df = pd.read_csv(
fn, index_col=0, header=0, usecols=[0, 4], parse_dates=["Date"]
)
        # because fn is so small it would be one chunk by default, so set a small blocksize
ddf = dd.read_csv(
fn, header=0, usecols=[0, 4], parse_dates=["Date"], blocksize=400
).set_index("Date")
assert_eq(df, ddf)
xfail_pandas_100 = pytest.mark.xfail(reason="https://github.com/dask/dask/issues/5787")
@pytest.mark.parametrize(
"encoding",
[
pytest.param("utf-16", marks=xfail_pandas_100),
pytest.param("utf-16-le", marks=xfail_pandas_100),
"utf-16-be",
],
)
def test_encoding_gh601(encoding):
ar = pd.Series(range(0, 100))
br = ar % 7
cr = br * 3.3
dr = br / 1.9836
test_df = pd.DataFrame({"a": ar, "b": br, "c": cr, "d": dr})
with tmpfile(".csv") as fn:
test_df.to_csv(fn, encoding=encoding, index=False)
a = pd.read_csv(fn, encoding=encoding)
d = dd.read_csv(fn, encoding=encoding, blocksize=1000)
d = d.compute()
d.index = range(len(d.index))
assert_eq(d, a)
def test_read_csv_header_issue_823():
text = """a b c-d\n1 2 3\n4 5 6""".replace(" ", "\t")
with filetext(text) as fn:
df = dd.read_csv(fn, sep="\t")
assert_eq(df, pd.read_csv(fn, sep="\t"))
df = dd.read_csv(fn, delimiter="\t")
assert_eq(df, pd.read_csv(fn, delimiter="\t"))
def test_none_usecols():
with filetext(csv_text) as fn:
df = dd.read_csv(fn, usecols=None)
assert_eq(df, pd.read_csv(fn, usecols=None))
def test_parse_dates_multi_column():
pdmc_text = normalize_text(
"""
ID,date,time
10,2003-11-04,180036
11,2003-11-05,125640
12,2003-11-01,2519
13,2003-10-22,142559
14,2003-10-24,163113
15,2003-10-20,170133
16,2003-11-11,160448
17,2003-11-03,171759
18,2003-11-07,190928
19,2003-10-21,84623
20,2003-10-25,192207
21,2003-11-13,180156
22,2003-11-15,131037
"""
)
with filetext(pdmc_text) as fn:
ddf = dd.read_csv(fn, parse_dates=[["date", "time"]])
df = pd.read_csv(fn, parse_dates=[["date", "time"]])
assert (df.columns == ddf.columns).all()
assert len(df) == len(ddf)
def test_read_csv_sep():
sep_text = normalize_text(
"""
name###amount
alice###100
bob###200
charlie###300"""
)
with filetext(sep_text) as fn:
ddf = dd.read_csv(fn, sep="###", engine="python")
df = pd.read_csv(fn, sep="###", engine="python")
assert (df.columns == ddf.columns).all()
assert len(df) == len(ddf)
def test_read_csv_slash_r():
data = b"0,my\n1,data\n" * 1000 + b"2,foo\rbar"
with filetext(data, mode="wb") as fn:
dd.read_csv(
fn,
header=None,
sep=",",
lineterminator="\n",
names=["a", "b"],
blocksize=200,
).compute(scheduler="sync")
def test_read_csv_singleton_dtype():
data = b"a,b\n1,2\n3,4\n5,6"
with filetext(data, mode="wb") as fn:
assert_eq(pd.read_csv(fn, dtype=float), dd.read_csv(fn, dtype=float))
def test_robust_column_mismatch():
files = csv_files.copy()
k = sorted(files)[-1]
files[k] = files[k].replace(b"name", b"Name")
with filetexts(files, mode="b"):
ddf = dd.read_csv(
"2014-01-*.csv", header=None, skiprows=1, names=["name", "amount", "id"]
)
df = pd.read_csv("2014-01-01.csv")
assert (df.columns == ddf.columns).all()
assert_eq(ddf, ddf)
def test_different_columns_are_allowed():
files = csv_files.copy()
k = sorted(files)[-1]
files[k] = files[k].replace(b"name", b"address")
with filetexts(files, mode="b"):
ddf = dd.read_csv("2014-01-*.csv")
# since enforce is False, meta doesn't have to match computed
assert (ddf.columns == ["name", "amount", "id"]).all()
assert (ddf.compute().columns == ["name", "amount", "id", "address"]).all()
def test_error_if_sample_is_too_small():
text = "AAAAA,BBBBB,CCCCC,DDDDD,EEEEE\n1,2,3,4,5\n6,7,8,9,10\n11,12,13,14,15"
with filetext(text) as fn:
# Sample size stops mid header row
sample = 20
with pytest.raises(ValueError):
dd.read_csv(fn, sample=sample)
# Saying no header means this is fine
assert_eq(
dd.read_csv(fn, sample=sample, header=None), pd.read_csv(fn, header=None)
)
skiptext = "# skip\n# these\n# lines\n"
text = skiptext + text
with filetext(text) as fn:
# Sample size stops mid header row
sample = 20 + len(skiptext)
with pytest.raises(ValueError):
dd.read_csv(fn, sample=sample, skiprows=3)
# Saying no header means this is fine
assert_eq(
dd.read_csv(fn, sample=sample, header=None, skiprows=3),
pd.read_csv(fn, header=None, skiprows=3),
)
def test_read_csv_names_not_none():
text = (
"Alice,100\n"
"Bob,-200\n"
"Charlie,300\n"
"Dennis,400\n"
"Edith,-500\n"
"Frank,600\n"
)
names = ["name", "amount"]
with filetext(text) as fn:
ddf = dd.read_csv(fn, names=names, blocksize=16)
df = pd.read_csv(fn, names=names)
assert_eq(df, ddf, check_index=False)
############
# to_csv #
############
def test_to_csv():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
for npartitions in [1, 2]:
a = dd.from_pandas(df, npartitions)
with tmpdir() as dn:
a.to_csv(dn, index=False)
result = dd.read_csv(os.path.join(dn, "*")).compute().reset_index(drop=True)
assert_eq(result, df)
with tmpdir() as dn:
r = a.to_csv(dn, index=False, compute=False)
dask.compute(*r, scheduler="sync")
result = dd.read_csv(os.path.join(dn, "*")).compute().reset_index(drop=True)
assert_eq(result, df)
with tmpdir() as dn:
fn = os.path.join(dn, "data_*.csv")
a.to_csv(fn, index=False)
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df)
def test_to_csv_multiple_files_cornercases():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
a = dd.from_pandas(df, 2)
with tmpdir() as dn:
with pytest.raises(ValueError):
fn = os.path.join(dn, "data_*_*.csv")
a.to_csv(fn)
df16 = pd.DataFrame(
{
"x": [
"a",
"b",
"c",
"d",
"e",
"f",
"g",
"h",
"i",
"j",
"k",
"l",
"m",
"n",
"o",
"p",
],
"y": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
}
)
a = dd.from_pandas(df16, 16)
with tmpdir() as dn:
fn = os.path.join(dn, "data_*.csv")
a.to_csv(fn, index=False)
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df16)
# test handling existing files when links are optimized out
a = dd.from_pandas(df, 2)
with tmpdir() as dn:
a.to_csv(dn, index=False)
fn = os.path.join(dn, "data_*.csv")
a.to_csv(fn, mode="w", index=False)
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df)
# test handling existing files when links are optimized out
a = dd.from_pandas(df16, 16)
with tmpdir() as dn:
a.to_csv(dn, index=False)
fn = os.path.join(dn, "data_*.csv")
a.to_csv(fn, mode="w", index=False)
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df16)
def test_to_single_csv():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
for npartitions in [1, 2]:
a = dd.from_pandas(df, npartitions)
with tmpdir() as dn:
fn = os.path.join(dn, "test.csv")
a.to_csv(fn, index=False, single_file=True)
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df)
with tmpdir() as dn:
fn = os.path.join(dn, "test.csv")
r = a.to_csv(fn, index=False, compute=False, single_file=True)
dask.compute(r, scheduler="sync")
result = dd.read_csv(fn).compute().reset_index(drop=True)
assert_eq(result, df)
def test_to_single_csv_with_name_function():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
a = dd.from_pandas(df, 1)
with tmpdir() as dn:
fn = os.path.join(dn, "test.csv")
with pytest.raises(
ValueError,
match="name_function is not supported under the single file mode",
):
a.to_csv(fn, name_function=lambda x: x, index=False, single_file=True)
def test_to_single_csv_with_header_first_partition_only():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
a = dd.from_pandas(df, 1)
with tmpdir() as dn:
fn = os.path.join(dn, "test.csv")
with pytest.raises(
ValueError,
match="header_first_partition_only cannot be False in the single file mode.",
):
a.to_csv(
fn, index=False, header_first_partition_only=False, single_file=True
)
def test_to_single_csv_gzip():
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
for npartitions in [1, 2]:
a = dd.from_pandas(df, npartitions)
with tmpdir() as dn:
fn = os.path.join(dn, "test.csv.gz")
a.to_csv(fn, index=False, compression="gzip", single_file=True)
result = pd.read_csv(fn, compression="gzip").reset_index(drop=True)
assert_eq(result, df)
@pytest.mark.xfail(reason="to_csv does not support compression")
def test_to_csv_gzip():
df = pd.DataFrame(
{"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]}, index=[1.0, 2.0, 3.0, 4.0]
)
for npartitions in [1, 2]:
a = dd.from_pandas(df, npartitions)
with tmpfile("csv") as fn:
a.to_csv(fn, compression="gzip")
result = pd.read_csv(fn, index_col=0, compression="gzip")
tm.assert_frame_equal(result, df)
def test_to_csv_nodir():
    # See #6062 and https://github.com/intake/filesystem_spec/pull/271
df0 = pd.DataFrame(
{"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]}, index=[1.0, 2.0, 3.0, 4.0]
)
df = dd.from_pandas(df0, npartitions=2)
with tmpdir() as dir:
dir0 = os.path.join(str(dir), "createme")
df.to_csv(dir0)
assert "createme" in os.listdir(dir)
assert os.listdir(dir0)
result = dd.read_csv(os.path.join(dir0, "*")).compute()
assert (result.x.values == df0.x.values).all()
def test_to_csv_simple():
df0 = pd.DataFrame(
{"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]}, index=[1.0, 2.0, 3.0, 4.0]
)
df = dd.from_pandas(df0, npartitions=2)
with tmpdir() as dir:
dir = str(dir)
df.to_csv(dir)
assert os.listdir(dir)
result = dd.read_csv(os.path.join(dir, "*")).compute()
assert (result.x.values == df0.x.values).all()
def test_to_csv_series():
df0 = pd.Series(["a", "b", "c", "d"], index=[1.0, 2.0, 3.0, 4.0])
df = dd.from_pandas(df0, npartitions=2)
with tmpdir() as dir:
dir = str(dir)
df.to_csv(dir, header=False)
assert os.listdir(dir)
result = dd.read_csv(os.path.join(dir, "*"), header=None, names=["x"]).compute()
assert (result.x == df0).all()
def test_to_csv_with_get():
from dask.multiprocessing import get as mp_get
flag = [False]
def my_get(*args, **kwargs):
flag[0] = True
return mp_get(*args, **kwargs)
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
ddf = dd.from_pandas(df, npartitions=2)
with tmpdir() as dn:
ddf.to_csv(dn, index=False, compute_kwargs={"scheduler": my_get})
assert flag[0]
result = dd.read_csv(os.path.join(dn, "*"))
assert_eq(result, df, check_index=False)
def test_to_csv_warns_using_scheduler_argument():
from dask.multiprocessing import get as mp_get
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
ddf = dd.from_pandas(df, npartitions=2)
def my_get(*args, **kwargs):
return mp_get(*args, **kwargs)
with tmpdir() as dn:
with pytest.warns(FutureWarning):
ddf.to_csv(dn, index=False, scheduler=my_get)
def test_to_csv_errors_using_multiple_scheduler_args():
from dask.multiprocessing import get as mp_get
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
ddf = dd.from_pandas(df, npartitions=2)
def my_get(*args, **kwargs):
return mp_get(*args, **kwargs)
with tmpdir() as dn:
with pytest.raises(ValueError) and pytest.warns(FutureWarning):
ddf.to_csv(
dn, index=False, scheduler=my_get, compute_kwargs={"scheduler": my_get}
)
def test_to_csv_keeps_all_non_scheduler_compute_kwargs():
from dask.multiprocessing import get as mp_get
def my_get(*args, **kwargs):
assert kwargs["test_kwargs_passed"] == "foobar"
return mp_get(*args, **kwargs)
df = pd.DataFrame({"x": ["a", "b", "c", "d"], "y": [1, 2, 3, 4]})
ddf = dd.from_pandas(df, npartitions=2)
with tmpdir() as dn:
ddf.to_csv(
dn,
index=False,
compute_kwargs={"scheduler": my_get, "test_kwargs_passed": "foobar"},
)
def test_to_csv_paths():
df = pd.DataFrame({"A": range(10)})
ddf = dd.from_pandas(df, npartitions=2)
paths = ddf.to_csv("foo*.csv")
assert paths[0].endswith("foo0.csv")
assert paths[1].endswith("foo1.csv")
os.remove("foo0.csv")
os.remove("foo1.csv")
@pytest.mark.parametrize("header, expected", [(False, ""), (True, "x,y\n")])
def test_to_csv_header_empty_dataframe(header, expected):
dfe = pd.DataFrame({"x": [], "y": []})
ddfe = dd.from_pandas(dfe, npartitions=1)
with tmpdir() as dn:
ddfe.to_csv(os.path.join(dn, "fooe*.csv"), index=False, header=header)
assert not os.path.exists(os.path.join(dn, "fooe1.csv"))
filename = os.path.join(dn, "fooe0.csv")
with open(filename, "r") as fp:
line = fp.readline()
assert line == expected
os.remove(filename)
@pytest.mark.parametrize(
"header,header_first_partition_only,expected_first,expected_next",
[
(False, False, "a,1\n", "d,4\n"),
(True, False, "x,y\n", "x,y\n"),
(False, True, "a,1\n", "d,4\n"),
(True, True, "x,y\n", "d,4\n"),
(["aa", "bb"], False, "aa,bb\n", "aa,bb\n"),
(["aa", "bb"], True, "aa,bb\n", "d,4\n"),
],
)
def test_to_csv_header(
header, header_first_partition_only, expected_first, expected_next
):
partition_count = 2
df = pd.DataFrame({"x": ["a", "b", "c", "d", "e", "f"], "y": [1, 2, 3, 4, 5, 6]})
ddf = dd.from_pandas(df, npartitions=partition_count)
with tmpdir() as dn:
        # header / header_first_partition_only combinations are parametrized above;
        # check the first line written to each partition's file.
ddf.to_csv(
os.path.join(dn, "fooa*.csv"),
index=False,
header=header,
header_first_partition_only=header_first_partition_only,
)
filename = os.path.join(dn, "fooa0.csv")
with open(filename, "r") as fp:
line = fp.readline()
assert line == expected_first
os.remove(filename)
filename = os.path.join(dn, "fooa1.csv")
with open(filename, "r") as fp:
line = fp.readline()
assert line == expected_next
os.remove(filename)
def test_to_csv_line_ending():
df = pd.DataFrame({"x": [0]})
ddf = dd.from_pandas(df, npartitions=1)
expected = {b"0\r\n", b"0\n"} # either/or
# For comparison...
# unexpected = {b'0\r\r\n'}
# This test addresses GH4809, and checks that only (at most) one
# '\r' character is written per line when writing to csv.
# In case it's correct (on UNIX) to have no '\r' at all, this test
# considers either '\r\n' or '\n' as appropriate line endings,
# but not '\r\r\n'.
with tmpdir() as dn:
ddf.to_csv(os.path.join(dn, "foo*.csv"), header=False, index=False)
filename = os.path.join(dn, "foo0.csv")
with open(filename, "rb") as f:
raw = f.read()
assert raw in expected
@pytest.mark.parametrize(
"block_lists",
[
[[1, 2], [3], [4, 5, 6]],
[],
[[], [], [1], [], [1]],
[list(range(i)) for i in range(10)],
],
)
def test_block_mask(block_lists):
mask = list(block_mask(block_lists))
assert len(mask) == len(list(flatten(block_lists)))
def test_reading_empty_csv_files_with_path():
with tmpdir() as tdir:
for k, content in enumerate(["0, 1, 2", "", "6, 7, 8"]):
with open(os.path.join(tdir, str(k) + ".csv"), "w") as file:
file.write(content)
result = dd.read_csv(
os.path.join(tdir, "*.csv"),
include_path_column=True,
converters={"path": parse_filename},
names=["A", "B", "C"],
).compute()
df = pd.DataFrame(
{
"A": [0, 6],
"B": [1, 7],
"C": [2, 8],
"path": ["0.csv", "2.csv"],
}
)
df["path"] = df["path"].astype("category")
assert_eq(result, df, check_index=False)
def test_read_csv_groupby_get_group(tmpdir):
# https://github.com/dask/dask/issues/7005
path = os.path.join(str(tmpdir), "test.csv")
df1 = pd.DataFrame([{"foo": 10, "bar": 4}])
df1.to_csv(path, index=False)
ddf1 = dd.read_csv(path)
ddfs = ddf1.groupby("foo")
assert_eq(df1, ddfs.get_group(10).compute())
def test_csv_getitem_column_order(tmpdir):
# See: https://github.com/dask/dask/issues/7759
path = os.path.join(str(tmpdir), "test.csv")
columns = list("abcdefghijklmnopqrstuvwxyz")
values = list(range(len(columns)))
df1 = pd.DataFrame([{c: v for c, v in zip(columns, values)}])
df1.to_csv(path)
# Use disordered and duplicated column selection
columns = list("hczzkylaape")
df2 = dd.read_csv(path)[columns].head(1)
assert_eq(df1[columns], df2)
def test_csv_parse_fail(tmpdir):
# See GH #7680
path = os.path.join(str(tmpdir), "test.csv")
data = b'a,b\n1,"hi\n"\n2,"oi\n"\n'
expected = pd.read_csv(BytesIO(data))
with open(path, "wb") as f:
f.write(data)
with pytest.raises(ValueError, match="EOF encountered"):
dd.read_csv(path, sample=13)
df = dd.read_csv(path, sample=13, sample_rows=1)
assert_eq(df, expected)
def test_csv_name_should_be_different_even_if_head_is_same(tmpdir):
# https://github.com/dask/dask/issues/7904
import random
from shutil import copyfile
old_csv_path = os.path.join(str(tmpdir), "old.csv")
new_csv_path = os.path.join(str(tmpdir), "new_csv")
# Create random CSV
with open(old_csv_path, "w") as f:
for _ in range(10):
f.write(
f"{random.randrange(1, 10**9):09}, {random.randrange(1, 10**9):09}, {random.randrange(1, 10**9):09}\n"
)
copyfile(old_csv_path, new_csv_path)
# Add three new rows
with open(new_csv_path, "a") as f:
for _ in range(3):
f.write(
f"{random.randrange(1, 10**9):09}, {random.randrange(1, 10**9):09}, {random.randrange(1, 10**9):09}\n"
)
new_df = dd.read_csv(
new_csv_path, header=None, delimiter=",", dtype=str, blocksize=None
)
old_df = dd.read_csv(
old_csv_path, header=None, delimiter=",", dtype=str, blocksize=None
)
assert new_df.dask.keys() != old_df.dask.keys()
|
(global["webpackJsonp"] = global["webpackJsonp"] || []).push([["pages/home/allClass"],{
/***/ 352:
/*!****************************************************************************************!*\
!*** D:/workspace/course/course_miniweixin/main.js?{"page":"pages%2Fhome%2FallClass"} ***!
\****************************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
/* WEBPACK VAR INJECTION */(function(createPage) {__webpack_require__(/*! uni-pages */ 4);
var _vue = _interopRequireDefault(__webpack_require__(/*! vue */ 2));
var _allClass = _interopRequireDefault(__webpack_require__(/*! ./pages/home/allClass.vue */ 353));function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}
createPage(_allClass.default);
/* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./node_modules/@dcloudio/uni-mp-weixin/dist/index.js */ 1)["createPage"]))
/***/ }),
/***/ 353:
/*!*********************************************************************!*\
!*** D:/workspace/course/course_miniweixin/pages/home/allClass.vue ***!
\*********************************************************************/
/*! no static exports found */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./allClass.vue?vue&type=template&id=34dbd744&scoped=true& */ 354);
/* harmony import */ var _allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./allClass.vue?vue&type=script&lang=js& */ 356);
/* harmony reexport (unknown) */ for(var __WEBPACK_IMPORT_KEY__ in _allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__) if(__WEBPACK_IMPORT_KEY__ !== 'default') (function(key) { __webpack_require__.d(__webpack_exports__, key, function() { return _allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__[key]; }) }(__WEBPACK_IMPORT_KEY__));
/* harmony import */ var _allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./allClass.vue?vue&type=style&index=0&id=34dbd744&lang=scss&scoped=true& */ 358);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib/runtime/componentNormalizer.js */ 10);
var renderjs
/* normalize component */
var component = Object(_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_3__["default"])(
_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["render"],
_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
"34dbd744",
null,
false,
_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["components"],
renderjs
)
component.options.__file = "pages/home/allClass.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ 354:
/*!****************************************************************************************************************!*\
!*** D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=template&id=34dbd744&scoped=true& ***!
\****************************************************************************************************************/
/*! exports provided: render, staticRenderFns, recyclableRender, components */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_16_0_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_template_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_uni_app_loader_page_meta_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--16-0!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/template.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-uni-app-loader/page-meta.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!./allClass.vue?vue&type=template&id=34dbd744&scoped=true& */ 355);
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_16_0_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_template_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_uni_app_loader_page_meta_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_16_0_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_template_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_uni_app_loader_page_meta_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "recyclableRender", function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_16_0_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_template_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_uni_app_loader_page_meta_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["recyclableRender"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "components", function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_16_0_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_template_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_uni_app_loader_page_meta_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_template_id_34dbd744_scoped_true___WEBPACK_IMPORTED_MODULE_0__["components"]; });
/***/ }),
/***/ 355:
/*!****************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--16-0!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/template.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-uni-app-loader/page-meta.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=template&id=34dbd744&scoped=true& ***!
\****************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns, recyclableRender, components */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "recyclableRender", function() { return recyclableRender; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "components", function() { return components; });
var components
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
}
var recyclableRender = false
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ 356:
/*!**********************************************************************************************!*\
!*** D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=script&lang=js& ***!
\**********************************************************************************************/
/*! no static exports found */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!./node_modules/babel-loader/lib!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--12-1!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/script.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!./allClass.vue?vue&type=script&lang=js& */ 357);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__);
/* harmony reexport (unknown) */ for(var __WEBPACK_IMPORT_KEY__ in _C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__) if(__WEBPACK_IMPORT_KEY__ !== 'default') (function(key) { __webpack_require__.d(__webpack_exports__, key, function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__[key]; }) }(__WEBPACK_IMPORT_KEY__));
/* harmony default export */ __webpack_exports__["default"] = (_C_HBuilderX_plugins_uniapp_cli_node_modules_babel_loader_lib_index_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_12_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_script_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0___default.a);
/***/ }),
/***/ 357:
/*!*****************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--12-1!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/script.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=script&lang=js& ***!
\*****************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
/* WEBPACK VAR INJECTION */(function(uni) {Object.defineProperty(exports, "__esModule", { value: true });exports.default = void 0;var navBar = function navBar() {__webpack_require__.e(/*! require.ensure | components/nav-bar */ "components/nav-bar").then((function () {return resolve(__webpack_require__(/*! @/components/nav-bar.vue */ 477));}).bind(null, __webpack_require__)).catch(__webpack_require__.oe);};var customHeader = function customHeader() {__webpack_require__.e(/*! require.ensure | components/custom-header/custom-header */ "components/custom-header/custom-header").then((function () {return resolve(__webpack_require__(/*! @/components/custom-header/custom-header.vue */ 456));}).bind(null, __webpack_require__)).catch(__webpack_require__.oe);};var _default =
{
components: {
navBar: navBar,
customHeader: customHeader },
data: function data() {
return {
title: '选课中心',
list: [], // data displayed on the page
inputVlaue: '', // search keyword
hasNextPage: true, // whether there is a next page
pages: 1, // current page number
pageSize: 50 // items per page
};
},
onLoad: function onLoad(options) {
this.getClassList();
},
onReachBottom: function onReachBottom() {
if (this.hasNextPage) {
this.pages++;
this.getClassList();
}
},
methods: {
getBtnserch: function getBtnserch() {
this.list = [];
this.getClassList();
},
goClassDetail: function goClassDetail(classid) {
uni.navigateTo({
url: './allDetail?clssid=' + classid + '&ids=' + '' + '&start=' + '' + '&end=' + '' +
'&isClass=' + '1002' });
},
// fetch the course list
getClassList: function getClassList() {var _this = this;
var that = this;
that.$request({
url: 'course/v2/exercise/list',
data: {
key: that.inputVlaue,
pageNo: this.pages,
pageSize: this.pageSize },
method: 'get' }).
then(function (res) {
if (res.data.success) {
var datalist = res.data.result.records;
if (datalist.length < _this.pageSize) {
that.hasNextPage = false;
}
that.list = that.list.concat(datalist);
}
}).catch(function (err) {
});
} } };exports.default = _default;
/* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./node_modules/@dcloudio/uni-mp-weixin/dist/index.js */ 1)["default"]))
/***/ }),
/***/ 358:
/*!*******************************************************************************************************************************!*\
!*** D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=style&index=0&id=34dbd744&lang=scss&scoped=true& ***!
\*******************************************************************************************************************************/
/*! no static exports found */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!./node_modules/mini-css-extract-plugin/dist/loader.js??ref--8-oneOf-1-0!./node_modules/css-loader/dist/cjs.js??ref--8-oneOf-1-1!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib/loaders/stylePostLoader.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--8-oneOf-1-2!./node_modules/postcss-loader/src??ref--8-oneOf-1-3!./node_modules/sass-loader/dist/cjs.js??ref--8-oneOf-1-4!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--8-oneOf-1-5!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!./allClass.vue?vue&type=style&index=0&id=34dbd744&lang=scss&scoped=true& */ 359);
/* harmony import */ var _C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0__);
/* harmony reexport (unknown) */ for(var __WEBPACK_IMPORT_KEY__ in _C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0__) if(__WEBPACK_IMPORT_KEY__ !== 'default') (function(key) { __webpack_require__.d(__webpack_exports__, key, function() { return _C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0__[key]; }) }(__WEBPACK_IMPORT_KEY__));
/* harmony default export */ __webpack_exports__["default"] = (_C_HBuilderX_plugins_uniapp_cli_node_modules_mini_css_extract_plugin_dist_loader_js_ref_8_oneOf_1_0_C_HBuilderX_plugins_uniapp_cli_node_modules_css_loader_dist_cjs_js_ref_8_oneOf_1_1_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_loaders_stylePostLoader_js_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_2_C_HBuilderX_plugins_uniapp_cli_node_modules_postcss_loader_src_index_js_ref_8_oneOf_1_3_C_HBuilderX_plugins_uniapp_cli_node_modules_sass_loader_dist_cjs_js_ref_8_oneOf_1_4_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_webpack_preprocess_loader_index_js_ref_8_oneOf_1_5_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_vue_cli_plugin_uni_packages_vue_loader_lib_index_js_vue_loader_options_C_HBuilderX_plugins_uniapp_cli_node_modules_dcloudio_webpack_uni_mp_loader_lib_style_js_allClass_vue_vue_type_style_index_0_id_34dbd744_lang_scss_scoped_true___WEBPACK_IMPORTED_MODULE_0___default.a);
/***/ }),
/***/ 359:
/*!*********************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/mini-css-extract-plugin/dist/loader.js??ref--8-oneOf-1-0!./node_modules/css-loader/dist/cjs.js??ref--8-oneOf-1-1!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib/loaders/stylePostLoader.js!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--8-oneOf-1-2!./node_modules/postcss-loader/src??ref--8-oneOf-1-3!./node_modules/sass-loader/dist/cjs.js??ref--8-oneOf-1-4!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/webpack-preprocess-loader??ref--8-oneOf-1-5!./node_modules/@dcloudio/vue-cli-plugin-uni/packages/vue-loader/lib??vue-loader-options!./node_modules/@dcloudio/webpack-uni-mp-loader/lib/style.js!D:/workspace/course/course_miniweixin/pages/home/allClass.vue?vue&type=style&index=0&id=34dbd744&lang=scss&scoped=true& ***!
\*********************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
// extracted by mini-css-extract-plugin
if(false) { var cssReload; }
/***/ })
},[[352,"common/runtime","common/vendor"]]]);
//# sourceMappingURL=../../../.sourcemap/mp-weixin/pages/home/allClass.js.map |
import React from 'react'
import { View, StatusBar } from 'react-native'
import Constants from 'expo-constants'
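// Renders a View sized to the device status bar height so the translucent
// StatusBar below it sits on a solid background of the given color.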
export default function AppStatusBar ({ backgroundColor, ...props }) {
return (
<View style={{ backgroundColor, height: Constants.statusBarHeight }}>
<StatusBar
translucent backgroundColor={backgroundColor} {...props}
/>
</View>
)
}
|
import json
import os
import re
import tempfile
from unittest import mock
from dev import update_ml_package_versions
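# Fakes the context-manager response object returned by urllib.request.urlopen,
# serving a canned PyPI-style JSON body ({"releases": {...}}).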
class MockResponse:
def __init__(self, body):
self.body = json.dumps(body).encode("utf-8")
def read(self):
return self.body
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
@classmethod
def from_versions(cls, versions):
return cls({"releases": {v: [f'{v}.whl'] for v in versions}})
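# Writes `src` to a temporary versions.yml, patches urllib.request.urlopen to
# answer PyPI lookups from `mock_responses` (keyed by package name), runs the
# updater against the file, and asserts the rewritten content equals `src_expected`.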
def run_test(src, src_expected, mock_responses):
def patch_urlopen(url):
package_name = re.search(r"https://pypi.python.org/pypi/(.+)/json", url).group(1)
return mock_responses[package_name]
with tempfile.TemporaryDirectory() as tmpdir:
tmp_path = os.path.join(tmpdir, "versions.yml")
with open(tmp_path, "w") as f:
f.write(src)
with mock.patch("urllib.request.urlopen", new=patch_urlopen):
update_ml_package_versions.main(["--path", tmp_path])
with open(tmp_path) as f:
assert f.read() == src_expected
def test_multiple_flavors_are_correctly_updated():
src = """
sklearn:
package_info:
pip_release: sklearn
autologging:
maximum: "0.0.1"
xgboost:
package_info:
pip_release: xgboost
autologging:
maximum: "0.1.1"
"""
mock_responses = {
"sklearn": MockResponse.from_versions(["0.0.2"]),
"xgboost": MockResponse.from_versions(["0.1.2"]),
}
src_expected = """
sklearn:
package_info:
pip_release: sklearn
autologging:
maximum: "0.0.2"
xgboost:
package_info:
pip_release: xgboost
autologging:
maximum: "0.1.2"
"""
run_test(src, src_expected, mock_responses)
def test_both_models_and_autologging_are_updated():
src = """
sklearn:
package_info:
pip_release: sklearn
models:
maximum: "0.0.1"
autologging:
maximum: "0.0.1"
"""
mock_responses = {
"sklearn": MockResponse.from_versions(["0.0.2"]),
}
src_expected = """
sklearn:
package_info:
pip_release: sklearn
models:
maximum: "0.0.2"
autologging:
maximum: "0.0.2"
"""
run_test(src, src_expected, mock_responses)
def test_pre_and_dev_versions_are_ignored():
src = """
sklearn:
package_info:
pip_release: sklearn
autologging:
maximum: "0.0.1"
"""
mock_responses = {
"sklearn": MockResponse.from_versions(
[
# pre-release and dev-release should be filtered out
"0.0.3.rc1", # pre-release
"0.0.3.dev1", # dev-release
"0.0.2.post", # post-release
"0.0.2", # final release
]
),
}
src_expected = """
sklearn:
package_info:
pip_release: sklearn
autologging:
maximum: "0.0.2.post"
"""
run_test(src, src_expected, mock_responses)
def test_unsupported_versions_are_ignored():
src = """
sklearn:
package_info:
pip_release: sklearn
autologging:
unsupported: ["0.0.3"]
maximum: "0.0.1"
"""
mock_responses = {"sklearn": MockResponse.from_versions(["0.0.2", "0.0.3"])}
src_expected = """
sklearn:
package_info:
pip_release: sklearn
autologging:
unsupported: ["0.0.3"]
maximum: "0.0.2"
"""
run_test(src, src_expected, mock_responses)
def test_pin_maximum_field_prevents_updating_maximum_version():
src = """
sklearn:
package_info:
pip_release: sklearn
autologging:
pin_maximum: True
maximum: "0.0.1"
"""
mock_responses = {"sklearn": MockResponse.from_versions(["0.0.2"])}
src_expected = """
sklearn:
package_info:
pip_release: sklearn
autologging:
pin_maximum: True
maximum: "0.0.1"
"""
run_test(src, src_expected, mock_responses)
|
import Activity from '../activity/Activity';
import {cloneContent} from '../messageHelper';
export default function InclusiveGateway(activityDef, context) {
return new Activity(InclusiveGatewayBehaviour, activityDef, context);
}
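// Minimal behaviour: the gateway completes as soon as it executes by publishing
// an execute.completed message with a clone of the incoming content.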
export function InclusiveGatewayBehaviour(activity) {
const {id, type, broker} = activity;
this.id = id;
this.type = type;
this.broker = broker;
}
InclusiveGatewayBehaviour.prototype.execute = function execute({content}) {
this.broker.publish('execution', 'execute.completed', cloneContent(content));
};
|
#!/usr/bin/env osascript -l JavaScript
ObjC.import("stdlib");
ObjC.import("Foundation");
const app = Application.currentApplication();
app.includeStandardAdditions = true;
function readFile (path, encoding) {
if (!encoding) encoding = $.NSUTF8StringEncoding;
const fm = $.NSFileManager.defaultManager;
const data = fm.contentsAtPath(path);
const str = $.NSString.alloc.initWithDataEncoding(data, encoding);
return ObjC.unwrap(str);
}
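// Build Alfred's match string: the name with punctuation replaced by spaces,
// followed by the original name, so both forms are searchable.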
const alfredMatcher = (str) => str.replace(/[-()_.]/g, " ") + " " + str + " ";
const baseURL = "https://forum.obsidian.md/c/";
const discordReadyLinks = Application("Discord").frontmost();
const jsonArray = [];
const forumDataJSON =
$.getenv("alfred_preferences") + "/workflows/"
+ $.getenv("alfred_workflow_uid")
+ "/data/forum-categories.json";
const workArray = JSON.parse(readFile(forumDataJSON));
workArray.forEach(category => {
const url = baseURL + category.browseURL;
let shareURL = url;
let discordReady = "";
if (discordReadyLinks) {
shareURL = "<" + url + ">";
discordReady = " (discord-ready)";
}
const forumCatID = category.browseURL.split("/")[0];
jsonArray.push({
"title": category.name,
"subtitle": category.description,
"match": alfredMatcher (category.name),
"arg": forumCatID,
"mods": {
"cmd": {
"arg": url,
"subtitle": "⌘: Open Forum Category",
},
"alt": {
"arg": shareURL,
"subtitle": "⌥: Copy URL to Forum Category" + discordReady,
},
},
});
});
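// The value of the script's last expression is printed by osascript and
// consumed by Alfred as the Script Filter's JSON output.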
JSON.stringify({ items: jsonArray });
|
/* eslint-disable require-jsdoc */
const jwt = require('jsonwebtoken');
const HttpStatus = require('../HttpStatus');
const config = require('../config');
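// Despite the name, the token may come from the request body, the query string,
// or the 'master-token' header; an empty string is returned when none is present.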
function getTokenFromHeaders(req) {
const token =
req.body.token || req.query.token || req.headers['master-token'];
if (!token) return '';
return token;
}
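// Express-style middleware: verifies the JWT against config.JWTSecret, attaches
// the decoded claims to req.decoded on success, and otherwise responds with a
// failure payload, or HttpStatus.forbidden when no token was provided at all.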
function validateToken(req, res, next) {
const token = getTokenFromHeaders(req);
if (token) {
jwt.verify(token, config.JWTSecret, (error, decoded) => {
if (error) {
return res.json({
success: false,
message: 'Failed to authenticate token.',
});
} else {
req.decoded = decoded;
next();
}
});
} else {
return res.status(HttpStatus.forbidden).send({
success: false,
message: 'No token provided.',
});
}
}
module.exports = validateToken;
|
__version__ = "0.1.0"
from mkninja._core import add_target_object, _Project, Target
|
import ujson
EVENT_PRE_TRIP_START = b"liftoff/trip/start/pre"
EVENT_TRIP_START = b"liftoff/trip/start"
EVENT_POST_TRIP_START = b"liftoff/trip/start/post"
EVENT_TRIP_READY = b"liftoff/trip/ready"
EVENT_PRE_TRIP_END = b"liftoff/trip/end/pre"
EVENT_TRIP_END = b"liftoff/trip/end"
EVENT_POST_TRIP_END = b"liftoff/trip/end/post"
#
EVENT_UPDATE_NEXT_QUEUE = b"liftoff/update/next"
EVENT_UPDATE_FLOOR = b"liftoff/update/floor"
EVENT_MOVEMENT_UPDATE = b"liftoff/move/to"
EVENT_POWER_UPDATE = b"liftoff/power"
EVENT_POWER_TRACK_DONE = b"liftoff/power/tracked"
EVENT_ID_CHECK = b"liftoff/id/tracked"
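# EventPayload wraps an optional transaction code plus a payload dict and
# serializes them to a single JSON string (the code is emitted as "id").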
class EventPayload:
def __init__(self, transaction_code, payload):
self.code = transaction_code
self.payload = payload
@property
def json(self):
"""returns json string"""
data = {}
if self.code:
data["id"] = self.code
for key, value in self.payload.items():
data[key] = value
return ujson.dumps(data)
class EventFactory:
@classmethod
def create_event(cls, source_id, transaction_code, payload):
payload['source'] = source_id
return EventPayload(transaction_code, payload).json
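# Minimal usage sketch (illustrative values only; the identifiers below are not
# part of the original module). create_event merges the source id into the
# payload and returns the JSON string built by EventPayload.json.
if __name__ == "__main__":
    event_json = EventFactory.create_event(
        source_id="lift-1",        # hypothetical elevator/source identifier
        transaction_code="tx-42",  # hypothetical transaction code
        payload={"floor": 3},
    )
    print(event_json)  # e.g. '{"id": "tx-42", "floor": 3, "source": "lift-1"}'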
|
# Generated by Django 3.2.7 on 2021-11-15 17:12
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('melodic', '0003_remove_track_author'),
]
operations = [
migrations.AddField(
model_name='comment',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=None),
preserve_default=False,
),
migrations.AddField(
model_name='comment',
name='track',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='melodic.track'),
preserve_default=False,
),
]
|
import logging
import json
import time
import io
from unittest import mock, skip
from django.test import TestCase, TransactionTestCase, tag
from django.urls import reverse
from django.contrib.auth.models import User
from django.conf import settings
from rest_framework import status
from celery.contrib.testing.worker import start_worker
from core.celery import app as celery_app
from core.celery import task_routes
from core.swiftmanager import SwiftManager
from plugins.models import PluginMeta, Plugin, PluginParameter, ComputeResource
from plugininstances.models import PluginInstance, PluginInstanceFile
from plugininstances.models import PathParameter, FloatParameter
from plugininstances.services.manager import PluginInstanceManager
from plugininstances import views
COMPUTE_RESOURCE_URL = settings.COMPUTE_RESOURCE_URL
class ViewTests(TestCase):
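"""
Generic view tests' setUp and tearDown.
"""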
def setUp(self):
# avoid cluttered console output (for instance logging all the http requests)
logging.disable(logging.WARNING)
self.chris_username = 'chris'
self.chris_password = 'chris12'
self.username = 'foo'
self.password = 'bar'
self.other_username = 'boo'
self.other_password = 'far'
self.content_type = 'application/vnd.collection+json'
(self.compute_resource, tf) = ComputeResource.objects.get_or_create(
name="host", compute_url=COMPUTE_RESOURCE_URL)
# create the chris superuser and two additional users
User.objects.create_user(username=self.chris_username,
password=self.chris_password)
User.objects.create_user(username=self.other_username,
password=self.other_password)
User.objects.create_user(username=self.username,
password=self.password)
# create two plugins
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='pacspull', type='fs')
(plugin_fs, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin_fs.compute_resources.set([self.compute_resource])
plugin_fs.save()
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='mri_convert', type='ds')
(plugin_ds, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin_ds.compute_resources.set([self.compute_resource])
plugin_ds.save()
def tearDown(self):
# re-enable logging
logging.disable(logging.NOTSET)
class TasksViewTests(TransactionTestCase):
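"""
Generic setUp and tearDown for tests that need a live Celery worker.
"""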
@classmethod
def setUpClass(cls):
logging.disable(logging.WARNING)
super().setUpClass()
# route tasks to this worker by using the default 'celery' queue
# that is exclusively used for the automated tests
celery_app.conf.update(task_routes=None)
cls.celery_worker = start_worker(celery_app,
concurrency=1,
perform_ping_check=False)
cls.celery_worker.__enter__()
@classmethod
def tearDownClass(cls):
super().tearDownClass()
cls.celery_worker.__exit__(None, None, None)
# reset routes to the original queues
celery_app.conf.update(task_routes=task_routes)
logging.disable(logging.NOTSET)
def setUp(self):
self.swift_manager = SwiftManager(settings.SWIFT_CONTAINER_NAME,
settings.SWIFT_CONNECTION_PARAMS)
self.chris_username = 'chris'
self.chris_password = 'chris12'
self.username = 'foo'
self.password = 'bar'
self.other_username = 'boo'
self.other_password = 'far'
self.content_type = 'application/vnd.collection+json'
(self.compute_resource, tf) = ComputeResource.objects.get_or_create(
name="host", compute_url=COMPUTE_RESOURCE_URL)
# create the chris superuser and two additional users
User.objects.create_user(username=self.chris_username,
password=self.chris_password)
User.objects.create_user(username=self.other_username,
password=self.other_password)
user = User.objects.create_user(username=self.username,
password=self.password)
# create two plugins
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='pacspull', type='fs')
(plugin_fs, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin_fs.compute_resources.set([self.compute_resource])
plugin_fs.save()
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='mri_convert', type='ds')
(plugin_ds, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin_ds.compute_resources.set([self.compute_resource])
plugin_ds.save()
# create pacspull fs plugin instance
(self.pl_inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin_fs, owner=user,
compute_resource=plugin_fs.compute_resources.all()[0])
# create mri_convert ds plugin instance
PluginInstance.objects.get_or_create(
plugin=plugin_ds, owner=user, previous=self.pl_inst,
compute_resource=plugin_ds.compute_resources.all()[0])
class PluginInstanceListViewTests(TasksViewTests):
"""
Test the plugininstance-list view.
"""
def setUp(self):
super(PluginInstanceListViewTests, self).setUp()
plugin = Plugin.objects.get(meta__name="pacspull")
self.create_read_url = reverse("plugininstance-list", kwargs={"pk": plugin.id})
self.user_space_path = '%s/uploads/' % self.username
self.post = json.dumps(
{"template": {"data": [{"name": "dir", "value": self.user_space_path},
{"name": "title", "value": 'test1'}]}})
def test_plugin_instance_create_success(self):
# add parameters to the plugin before the POST request
plugin = Plugin.objects.get(meta__name="pacspull")
PluginParameter.objects.get_or_create(plugin=plugin, name='dir', type='string',
optional=False)
# first test 'fs' plugin instance (has no previous plugin instance)
with mock.patch.object(views.run_plugin_instance, 'delay',
return_value=None) as delay_mock:
# make API request
self.client.login(username=self.username, password=self.password)
response = self.client.post(self.create_read_url, data=self.post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# check that the run_plugin_instance task was called with appropriate args
delay_mock.assert_called_with(response.data['id'])
self.assertEqual(response.data['status'], 'scheduled')
# now test 'ds' plugin instance (has previous plugin instance)
previous_plg_inst = PluginInstance.objects.get(title='test1')
plugin = Plugin.objects.get(meta__name="mri_convert")
create_read_url = reverse("plugininstance-list", kwargs={"pk": plugin.id})
post = json.dumps(
{"template": {"data": [{"name": "previous_id", "value": previous_plg_inst.id}]}})
previous_plg_inst.status = 'finishedSuccessfully'
previous_plg_inst.save()
with mock.patch.object(views.run_plugin_instance, 'delay',
return_value=None) as delay_mock:
self.client.login(username=self.username, password=self.password)
response = self.client.post(create_read_url, data=post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# check that the run_plugin_instance task was called with appropriate args
delay_mock.assert_called_with(response.data['id'])
self.assertEqual(response.data['status'], 'scheduled')
previous_plg_inst.status = 'started'
previous_plg_inst.save()
with mock.patch.object(views.run_plugin_instance, 'delay',
return_value=None) as delay_mock:
self.client.login(username=self.username, password=self.password)
response = self.client.post(create_read_url, data=post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# check that the run_plugin_instance task was not called
delay_mock.assert_not_called()
self.assertEqual(response.data['status'], 'waitingForPrevious')
previous_plg_inst.status = 'finishedWithError'
previous_plg_inst.save()
with mock.patch.object(views.run_plugin_instance, 'delay',
return_value=None) as delay_mock:
self.client.login(username=self.username, password=self.password)
response = self.client.post(create_read_url, data=post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# check that the run_plugin_instance task was not called
delay_mock.assert_not_called()
self.assertEqual(response.data['status'], 'cancelled')
@tag('integration')
def test_integration_plugin_instance_create_success(self):
# add an FS plugin to the system
plugin_parameters = [{'name': 'dir', 'type': 'path', 'action': 'store',
'optional': False, 'flag': '--dir', 'short_flag': '-d',
'help': 'test plugin', 'ui_exposed': True}]
self.plg_data = {'description': 'A simple chris fs app demo',
'version': '0.1',
'dock_image': 'fnndsc/pl-simplefsapp',
'execshell': 'python3',
'selfpath': '/usr/src/simplefsapp',
'selfexec': 'simplefsapp.py'}
self.plg_meta_data = {'name': 'simplefsapp',
'title': 'Dir plugin',
'license': 'MIT',
'type': 'fs',
'icon': 'http://github.com/plugin',
'category': 'Dir',
'stars': 0,
'authors': 'FNNDSC ([email protected])'}
self.plugin_repr = self.plg_data.copy()
self.plugin_repr.update(self.plg_meta_data)
self.plugin_repr['parameters'] = plugin_parameters
(compute_resource, tf) = ComputeResource.objects.get_or_create(
name="host", compute_url=COMPUTE_RESOURCE_URL)
data = self.plg_meta_data.copy()
(pl_meta, tf) = PluginMeta.objects.get_or_create(**data)
data = self.plg_data.copy()
(plugin, tf) = Plugin.objects.get_or_create(meta=pl_meta, **data)
plugin.compute_resources.set([compute_resource])
plugin.save()
# add plugin's parameters
parameters = plugin_parameters
PluginParameter.objects.get_or_create(
plugin=plugin,
name=parameters[0]['name'],
type=parameters[0]['type'],
flag=parameters[0]['flag'])
# upload a file to the Swift storage user's space
with io.StringIO('Test file') as f:
self.swift_manager.upload_obj(self.user_space_path + 'test.txt', f.read(),
content_type='text/plain')
# make POST API request to create a plugin instance
create_read_url = reverse("plugininstance-list", kwargs={"pk": plugin.id})
self.client.login(username=self.username, password=self.password)
response = self.client.post(create_read_url, data=self.post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# delete files from swift storage
self.swift_manager.delete_obj(self.user_space_path + 'test.txt')
def test_plugin_instance_create_failure_unauthenticated(self):
response = self.client.post(self.create_read_url, data=self.post,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_plugin_instance_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.create_read_url)
self.assertContains(response, "pacspull")
def test_plugin_instance_list_failure_unauthenticated(self):
response = self.client.get(self.create_read_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PluginInstanceDetailViewTests(TasksViewTests):
"""
Test the plugininstance-detail view.
"""
def setUp(self):
super(PluginInstanceDetailViewTests, self).setUp()
self.read_update_delete_url = reverse("plugininstance-detail",
kwargs={"pk": self.pl_inst.id})
def test_plugin_instance_detail_success(self):
self.pl_inst.status = 'started'
self.pl_inst.save()
with mock.patch.object(views.check_plugin_instance_exec_status, 'delay',
return_value=None) as delay_mock:
# make API request
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.read_update_delete_url)
self.assertContains(response, "pacspull")
self.assertEqual(response.data['status'], 'started')
# check that the check_plugin_instance_exec_status task was called with appropriate args
delay_mock.assert_called_with(self.pl_inst.id)
@tag('integration', 'error-pman')
def test_integration_plugin_instance_detail_success(self):
# add an FS plugin to the system
plugin_parameters = [{'name': 'dir', 'type': 'path', 'action': 'store',
'optional': False, 'flag': '--dir', 'short_flag': '-d',
'help': 'test plugin', 'ui_exposed': True}]
self.plg_data = {'description': 'A simple chris fs app demo',
'version': '0.1',
'dock_image': 'fnndsc/pl-simplefsapp',
'execshell': 'python3',
'selfpath': '/usr/src/simplefsapp',
'selfexec': 'simplefsapp.py'}
self.plg_meta_data = {'name': 'simplefsapp',
'title': 'Dir plugin',
'license': 'MIT',
'type': 'fs',
'icon': 'http://github.com/plugin',
'category': 'Dir',
'stars': 0,
'authors': 'FNNDSC ([email protected])'}
self.plugin_repr = self.plg_data.copy()
self.plugin_repr.update(self.plg_meta_data)
self.plugin_repr['parameters'] = plugin_parameters
(compute_resource, tf) = ComputeResource.objects.get_or_create(
name="host", compute_url=COMPUTE_RESOURCE_URL)
data = self.plg_meta_data.copy()
(pl_meta, tf) = PluginMeta.objects.get_or_create(**data)
data = self.plg_data.copy()
(plugin, tf) = Plugin.objects.get_or_create(meta=pl_meta, **data)
plugin.compute_resources.set([compute_resource])
plugin.save()
# add plugin's parameters
parameters = plugin_parameters
(pl_param, tf) = PluginParameter.objects.get_or_create(
plugin=plugin,
name=parameters[0]['name'],
type=parameters[0]['type'],
flag=parameters[0]['flag'])
# upload a file to the Swift storage user's space
user_space_path = '%s/uploads/' % self.username
with io.StringIO('Test file') as f:
self.swift_manager.upload_obj(user_space_path + 'test.txt', f.read(),
content_type='text/plain')
# create a plugin's instance
user = User.objects.get(username=self.username)
(pl_inst, tf) = PluginInstance.objects.get_or_create(
title='test2', plugin=plugin,
owner=user, compute_resource=plugin.compute_resources.all()[0])
pl_inst.status = 'scheduled'
pl_inst.save()
PathParameter.objects.get_or_create(plugin_inst=pl_inst, plugin_param=pl_param,
value=user_space_path)
read_update_delete_url = reverse("plugininstance-detail",
kwargs={"pk": pl_inst.id})
# run the plugin instance
plg_inst_manager = PluginInstanceManager(pl_inst)
plg_inst_manager.run_plugin_instance_app()
# make API GET request
self.client.login(username=self.username, password=self.password)
response = self.client.get(read_update_delete_url)
self.assertContains(response, "simplefsapp")
self.assertContains(response, 'started')
# In the following we keep checking the status until the job ends with
# 'finishedSuccessfully'. The code runs in a lazy loop poll with a
# max number of attempts at 10 second intervals.
maxLoopTries = 20
currentLoop = 1
b_checkAgain = True
time.sleep(10)
while b_checkAgain:
response = self.client.get(read_update_delete_url)
str_responseStatus = response.data['status']
if str_responseStatus == 'finishedSuccessfully':
b_checkAgain = False
elif currentLoop < maxLoopTries:
time.sleep(10)
if currentLoop == maxLoopTries:
b_checkAgain = False
currentLoop += 1
self.assertContains(response, "finishedSuccessfully")
# delete files from swift storage
self.swift_manager.delete_obj(user_space_path + 'test.txt')
# obj_paths = self.swift_manager.ls(pl_inst.get_output_path())
# for path in obj_paths:
# self.swift_manager.delete_obj(path)
def test_plugin_instance_detail_failure_unauthenticated(self):
response = self.client.get(self.read_update_delete_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_plugin_instance_update_success(self):
put = json.dumps({
"template": {"data": [{"name": "title", "value": "Test instance"},
{"name": "status", "value": "cancelled"}]}})
self.client.login(username=self.username, password=self.password)
response = self.client.put(self.read_update_delete_url, data=put,
content_type=self.content_type)
self.assertContains(response, "Test instance")
self.assertContains(response, "cancelled")
def test_plugin_instance_update_failure_current_status_is_finishedSuccessfully_or_finishedWithError(self):
put = json.dumps({
"template": {"data": [{"name": "status", "value": "cancelled"}]}})
self.pl_inst.status = 'finishedSuccessfully'
self.pl_inst.save()
self.client.login(username=self.username, password=self.password)
response = self.client.put(self.read_update_delete_url, data=put,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.pl_inst.status = 'finishedWithError'
self.pl_inst.save()
self.client.login(username=self.username, password=self.password)
response = self.client.put(self.read_update_delete_url, data=put,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_plugin_instance_update_failure_status_can_only_be_changed_to_cancelled(self):
put = json.dumps({
"template": {"data": [{"name": "status", "value": "finishedSuccessfully"}]}})
self.client.login(username=self.username, password=self.password)
response = self.client.put(self.read_update_delete_url, data=put,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_plugin_instance_update_failure_unauthenticated(self):
response = self.client.put(self.read_update_delete_url, data={},
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_plugin_instance_update_failure_access_denied(self):
put = json.dumps({
"template": {"data": [{"name": "status", "value": "cancelled"}]}})
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.put(self.read_update_delete_url, data=put,
content_type=self.content_type)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_plugin_instance_delete_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.delete(self.read_update_delete_url)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(PluginInstance.objects.count(), 0)
def test_plugin_instance_delete_failure_unauthenticated(self):
response = self.client.delete(self.read_update_delete_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_plugin_instance_delete_failure_access_denied(self):
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.delete(self.read_update_delete_url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class PluginInstanceListQuerySearchViewTests(ViewTests):
"""
Test the plugininstance-list-query-search view.
"""
def setUp(self):
super(PluginInstanceListQuerySearchViewTests, self).setUp()
user = User.objects.get(username=self.username)
# create two plugin instances
plugin = Plugin.objects.get(meta__name="pacspull")
(inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, compute_resource=plugin.compute_resources.all()[0])
plugin = Plugin.objects.get(meta__name="mri_convert")
(inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, previous=inst,
compute_resource=plugin.compute_resources.all()[0])
# set second instance's status
inst.status = 'finishedSuccessfully'
inst.save()
self.list_url = reverse("allplugininstance-list-query-search") + '?status=created'
def test_plugin_instance_query_search_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
# response should only contain the instances that match the query
self.assertContains(response, 'created')
self.assertNotContains(response, 'finishedSuccessfully')
def test_plugin_instance_query_search_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PluginInstanceDescendantListViewTests(ViewTests):
"""
Test the plugininstance-descendant-list view.
"""
def setUp(self):
super(PluginInstanceDescendantListViewTests, self).setUp()
user = User.objects.get(username=self.username)
# create an 'fs' plugin instance
plugin = Plugin.objects.get(meta__name="pacspull")
(fs_inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, compute_resource=plugin.compute_resources.all()[0])
# create a tree of 'ds' plugin instances
plugin = Plugin.objects.get(meta__name="mri_convert")
PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, previous=fs_inst,
compute_resource=plugin.compute_resources.all()[0])
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='mri_info', type='ds')
(plugin, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin.compute_resources.set([self.compute_resource])
plugin.save()
(ds_inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, previous=fs_inst,
compute_resource=plugin.compute_resources.all()[0])
(pl_meta, tf) = PluginMeta.objects.get_or_create(name='mri_surf', type='ds')
(plugin, tf) = Plugin.objects.get_or_create(meta=pl_meta, version='0.1')
plugin.compute_resources.set([self.compute_resource])
plugin.save()
PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, previous=ds_inst,
compute_resource=plugin.compute_resources.all()[0])
self.list_url = reverse("plugininstance-descendant-list", kwargs={"pk": fs_inst.id})
def test_plugin_instance_descendant_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
# response should contain all the instances in the tree
self.assertContains(response, "pacspull")
self.assertContains(response, "mri_convert")
self.assertContains(response, "mri_info")
self.assertContains(response, "mri_surf")
def test_plugin_instance_descendant_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PluginInstanceParameterListViewTests(ViewTests):
"""
Test the plugininstance-parameter-list view.
"""
def setUp(self):
super(PluginInstanceParameterListViewTests, self).setUp()
user = User.objects.get(username=self.username)
# create a plugin
plugin = Plugin.objects.get(meta__name="pacspull")
parameters = [{"type": "path", "name": "param1", "flag": "--param1"},
{"type": "float", "name": "param2", "flag": "--param2"}]
# add plugin's parameters
(param1, tf) = PluginParameter.objects.get_or_create(
plugin=plugin,
name=parameters[0]['name'],
type=parameters[0]['type'],
flag=parameters[0]['flag'])
(param2, tf) = PluginParameter.objects.get_or_create(
plugin=plugin,
name=parameters[1]['name'],
type=parameters[1]['type'],
flag=parameters[1]['flag'])
# create a plugin instance
(inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, compute_resource=plugin.compute_resources.all()[0])
# create two plugin parameter instances associated to the plugin instance
PathParameter.objects.get_or_create(plugin_inst=inst, plugin_param=param1,
value=self.username)
FloatParameter.objects.get_or_create(plugin_inst=inst, plugin_param=param2,
value=3.14)
self.list_url = reverse("plugininstance-parameter-list", kwargs={"pk": inst.id})
def test_plugin_instance_parameter_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
self.assertContains(response, "param1")
self.assertContains(response, self.username)
self.assertContains(response, "param2")
self.assertContains(response, 3.14)
def test_plugin_instance_parameter_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PluginInstanceFileViewTests(ViewTests):
"""
Generic plugin instance file view tests' setup and tearDown.
"""
def setUp(self):
super(PluginInstanceFileViewTests, self).setUp()
# create a plugin instance
user = User.objects.get(username=self.username)
plugin = Plugin.objects.get(meta__name="pacspull")
(self.plg_inst, tf) = PluginInstance.objects.get_or_create(
plugin=plugin, owner=user, compute_resource=plugin.compute_resources.all()[0])
# create test directory where files are created
# self.test_dir = settings.MEDIA_ROOT + '/test'
# settings.MEDIA_ROOT = self.test_dir
# if not os.path.exists(self.test_dir):
# os.makedirs(self.test_dir)
def tearDown(self):
super(PluginInstanceFileViewTests, self).tearDown()
# remove test directory
# shutil.rmtree(self.test_dir)
# settings.MEDIA_ROOT = os.path.dirname(self.test_dir)
class PluginInstanceFileListViewTests(PluginInstanceFileViewTests):
"""
Test the plugininstancefile-list view.
"""
def setUp(self):
super(PluginInstanceFileListViewTests, self).setUp()
# create a plugin instance file associated to the plugin instance
plg_inst = self.plg_inst
(plg_inst_file, tf) = PluginInstanceFile.objects.get_or_create(plugin_inst=plg_inst)
plg_inst_file.fname.name = 'test_file.txt'
plg_inst_file.save()
self.list_url = reverse("plugininstancefile-list", kwargs={"pk": plg_inst.id})
def test_plugin_instance_file_create_failure_post_not_allowed(self):
self.client.login(username=self.username, password=self.password)
# try to create a new plugin file with a POST request to the list
# POST request using multipart/form-data to be able to upload file
with io.StringIO("test file") as f:
post = {"fname": f}
response = self.client.post(self.list_url, data=post)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_plugin_instance_file_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
self.assertContains(response, "test_file.txt")
def test_plugin_instance_file_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_plugin_instance_file_list_failure_access_denied(self):
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class AllPluginInstanceFileListViewTests(PluginInstanceFileViewTests):
"""
Test the allplugininstancefile-list view.
"""
def setUp(self):
super(AllPluginInstanceFileListViewTests, self).setUp()
# create a plugin instance file associated to the plugin instance
plg_inst = self.plg_inst
(plg_inst_file, tf) = PluginInstanceFile.objects.get_or_create(plugin_inst=plg_inst)
plg_inst_file.fname.name = 'test_file.txt'
plg_inst_file.save()
self.list_url = reverse("allplugininstancefile-list")
def test_all_plugin_instance_file_create_failure_post_not_allowed(self):
self.client.login(username=self.username, password=self.password)
# try to create a new plugin file with a POST request to the list
# POST request using multipart/form-data to be able to upload file
with io.StringIO("test file") as f:
post = {"fname": f}
response = self.client.post(self.list_url, data=post)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_all_plugin_instance_file_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
self.assertContains(response, "test_file.txt")
def test_all_plugin_instance_file_list_from_shared_feed_success(self):
self.client.login(username=self.other_username, password=self.other_password)
plg_inst = self.plg_inst
user1 = User.objects.get(username=self.username)
user2 = User.objects.get(username=self.other_username)
plg_inst.feed.owner.set([user1, user2])
response = self.client.get(self.list_url)
self.assertContains(response, "test_file.txt")
def test_all_plugin_instance_file_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_all_plugin_instance_file_list_files_in_not_owned_feeds_inaccessible(self):
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.get(self.list_url)
self.assertNotContains(response, "test_file.txt")
class AllPluginInstanceFileListQuerySearchViewTests(PluginInstanceFileViewTests):
"""
Test the allplugininstancefile-list-query-search view.
"""
def setUp(self):
super(AllPluginInstanceFileListQuerySearchViewTests, self).setUp()
# create a plugin instance file associated to the plugin instance
plg_inst = self.plg_inst
(plg_inst_file, tf) = PluginInstanceFile.objects.get_or_create(plugin_inst=plg_inst)
plg_inst_file.fname.name = 'test_file.txt'
plg_inst_file.save()
self.list_url = reverse("allplugininstancefile-list-query-search") + '?id=' + \
str(plg_inst_file.id)
def test_plugin_instance_query_search_list_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.list_url)
self.assertContains(response, 'test_file.txt')
def test_plugin_instance_query_search_list_failure_unauthenticated(self):
response = self.client.get(self.list_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PluginInstanceFileDetailViewTests(PluginInstanceFileViewTests):
"""
Test the plugininstancefile-detail view.
"""
def setUp(self):
super(PluginInstanceFileDetailViewTests, self).setUp()
#self.corresponding_feed_url = reverse("feed-detail", kwargs={"pk": feed.id})
plg_inst = self.plg_inst
self.corresponding_plugin_instance_url = reverse("plugininstance-detail",
kwargs={"pk": plg_inst.id})
# create a file in the DB "already uploaded" to the server
(plg_inst_file, tf) = PluginInstanceFile.objects.get_or_create(plugin_inst=plg_inst)
plg_inst_file.fname.name = 'file1.txt'
plg_inst_file.save()
self.read_url = reverse("plugininstancefile-detail",
kwargs={"pk": plg_inst_file.id})
def test_plugin_instance_file_detail_success(self):
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.read_url)
self.assertContains(response, "file1.txt")
self.assertTrue(response.data["plugin_inst"].endswith(
self.corresponding_plugin_instance_url))
def test_plugin_instance_file_detail_success_user_chris(self):
self.client.login(username=self.chris_username, password=self.chris_password)
response = self.client.get(self.read_url)
self.assertContains(response, "file1.txt")
self.assertTrue(response.data["plugin_inst"].endswith(
self.corresponding_plugin_instance_url))
def test_plugin_instance_file_detail_failure_not_related_feed_owner(self):
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.get(self.read_url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_plugin_instance_file_detail_failure_unauthenticated(self):
response = self.client.get(self.read_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class FileResourceViewTests(PluginInstanceFileViewTests):
"""
Test the plugininstancefile-resource view.
"""
def setUp(self):
super(FileResourceViewTests, self).setUp()
plg_inst = self.plg_inst
# create a file in the DB "already uploaded" to the server
(plg_inst_file, tf) = PluginInstanceFile.objects.get_or_create(
plugin_inst=plg_inst)
plg_inst_file.fname.name = '/tests/file1.txt'
plg_inst_file.save()
self.download_url = reverse("plugininstancefile-resource",
kwargs={"pk": plg_inst_file.id}) + 'file1.txt'
def test_fileresource_get(self):
plg_inst_file = PluginInstanceFile.objects.get(fname="/tests/file1.txt")
fileresource_view_inst = mock.Mock()
fileresource_view_inst.get_object = mock.Mock(return_value=plg_inst_file)
request_mock = mock.Mock()
with mock.patch('plugininstances.views.Response') as response_mock:
views.FileResource.get(fileresource_view_inst, request_mock)
response_mock.assert_called_with(plg_inst_file.fname)
@tag('integration')
def test_integration_fileresource_download_success(self):
swift_manager = SwiftManager(settings.SWIFT_CONTAINER_NAME,
settings.SWIFT_CONNECTION_PARAMS)
# upload file to Swift storage
with io.StringIO("test file") as file1:
swift_manager.upload_obj('/tests/file1.txt', file1.read(),
content_type='text/plain')
self.client.login(username=self.username, password=self.password)
response = self.client.get(self.download_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(str(response.content, 'utf-8'), "test file")
# delete file from Swift storage
swift_manager.delete_obj('/tests/file1.txt')
def test_fileresource_download_failure_not_related_feed_owner(self):
self.client.login(username=self.other_username, password=self.other_password)
response = self.client.get(self.download_url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_fileresource_download_failure_unauthenticated(self):
response = self.client.get(self.download_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
'use strict';
const crypto = require('crypto');
/*!
 * Wraps the request options for posting JSON when the response is also expected to be JSON. */
exports.postJSON = function (data) {
return {
dataType: 'json',
method: 'POST',
data: JSON.stringify(data),
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
};
};
exports.getData = function () {
return {
method: 'GET',
headers: {
'Accept': 'application/json'
}
};
};
const JSONCtlCharsMap = {
'"': '\\"', // \u0022
'\\': '\\', // \u005c
'\b': '\\b', // \u0008
'\f': '\\f', // \u000c
'\n': '\\n', // \u000a
'\r': '\\r', // \u000d
'\t': '\\t' // \u0009
};
const JSONCtlCharsRE = /[\u0000-\u001F\u005C]/g;
function _replaceOneChar(c) {
return JSONCtlCharsMap[c] || '\\u' + (c.charCodeAt(0) + 0x10000).toString(16).substr(1);
}
exports.replaceJSONCtlChars = function (str) {
return str.replace(JSONCtlCharsRE, _replaceOneChar);
};
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const node_opcua_constants_1 = require("node-opcua-constants");
const node_opcua_nodeid_1 = require("node-opcua-nodeid");
function makeRefId(referenceTypeName) {
const nodeId = node_opcua_nodeid_1.makeNodeId(node_opcua_constants_1.ReferenceTypeIds[referenceTypeName]
|| node_opcua_constants_1.ObjectTypeIds[referenceTypeName]);
// istanbul ignore next
if (nodeId.isEmpty()) {
throw new Error("makeRefId: cannot find ReferenceTypeName + " + referenceTypeName);
}
return nodeId;
}
exports.makeRefId = makeRefId;
//# sourceMappingURL=proxy.js.map |
import hpolib.benchmarks.synthetic_functions as hpobench
from febo.environment import ContinuousDomain
from febo.environment.benchmarks import BenchmarkEnvironment
from hpolib.benchmarks import synthetic_functions
import numpy as np
class HpolibBenchmark(BenchmarkEnvironment):
"""
Abstract class to convert Hpolib benchmarks.
"""
def __init__(self, bench, path=None, min_value=-np.inf):
super().__init__(path)
self._bench = bench
info = bench.get_meta_information()
self._max_value = -info['f_opt']
l = np.array([b[0] for b in info['bounds']])
u = np.array([b[1] for b in info['bounds']])
self._domain = ContinuousDomain(l, u)
self._x0 = l + 0.1*self._domain.range
self._min_value = min_value
def f(self, x):
return np.maximum(-self._bench(x), self._min_value)
class Branin(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Branin(), path, min_value=-2)
class Hartmann3(HpolibBenchmark):
"""
d=3
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Hartmann3(), path)
class Hartmann6(HpolibBenchmark):
"""
d=6
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Hartmann6(), path)
self._x0 = np.array([0.1335990371483741, 0.2743781816448671, 0.2879962344461537, 0.10242147970254536, 0.3959197145814795, 0.5982863622683936])
self._old_max_value = self._max_value
self._max_value = 1.
def f(self, X):
return super().f(X)/self._old_max_value
class Camelback(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Camelback(), path)
# overwrite domain to get a reasonable range of function values
self._domain = ContinuousDomain(np.array([-2, -1]), np.array([2, 1]))
class Forrester(HpolibBenchmark):
"""
d=1
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Forrester(), path)
class Bohachevsky(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Bohachevsky(), path)
class GoldsteinPrice(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.GoldsteinPrice(), path)
class Levy(HpolibBenchmark):
"""
d=1
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Levy(), path)
class Rosenbrock(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.Rosenbrock(), path)
class Rosenbrock5D(HpolibBenchmark):
"""
d=5
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.rosenbrock.Rosenbrock5D(), path)
class Rosenbrock10D(HpolibBenchmark):
"""
d=10
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.rosenbrock.Rosenbrock10D(), path)
class Rosenbrock20D(HpolibBenchmark):
"""
d=20
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.rosenbrock.Rosenbrock20D(), path)
class SinOne(HpolibBenchmark):
"""
d=1
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.SinOne(), path)
class SinTwo(HpolibBenchmark):
"""
d=2
"""
def __init__(self, path=None):
super().__init__(synthetic_functions.SinTwo(), path) |
/*!
=========================================================
* Paper Dashboard React - v1.3.0
=========================================================
* Product Page: https://www.creative-tim.com/product/paper-dashboard-react
* Copyright 2021 Creative Tim (https://www.creative-tim.com)
* Licensed under MIT (https://github.com/creativetimofficial/paper-dashboard-react/blob/main/LICENSE.md)
* Coded by Creative Tim
=========================================================
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*/
import React from "react";
import ReactDOM from "react-dom";
import { BrowserRouter, Route, Switch, Redirect } from "react-router-dom";
import "bootstrap/dist/css/bootstrap.css";
import "assets/scss/paper-dashboard.scss?v=1.3.0";
import "assets/demo/demo.css";
import "perfect-scrollbar/css/perfect-scrollbar.css";
import AdminLayout from "layouts/Admin.js";
import Login from "./views/Login";
ReactDOM.render(
<BrowserRouter>
<Switch>
<Route path="/admin" render={(props) => <AdminLayout {...props} />} />
<Route path="/auth" render={(props) => <Login {...props} />} />
<Redirect to="/admin/dashboard" />
</Switch>
</BrowserRouter>,
document.getElementById("root")
);
|
/**
 * VexFlow - Bend Tests
* Copyright Mohit Muthanna 2010 <[email protected]>
*/
Vex.Flow.Test.Bend = {}
Vex.Flow.Test.Bend.Start = function() {
module("Bend");
Vex.Flow.Test.runTest("Double Bends", Vex.Flow.Test.Bend.doubleBends);
Vex.Flow.Test.runRaphaelTest("Double Bends (Raphael)",
Vex.Flow.Test.Bend.doubleBends);
Vex.Flow.Test.runTest("Reverse Bends", Vex.Flow.Test.Bend.reverseBends);
Vex.Flow.Test.runTest("Double Bends With Release",
Vex.Flow.Test.Bend.doubleBendsWithRelease);
Vex.Flow.Test.runRaphaelTest("Double Bends With Release (Raphael)",
Vex.Flow.Test.Bend.doubleBendsWithRelease);
}
Vex.Flow.Test.Bend.doubleBends = function(options, contextBuilder) {
var ctx = new contextBuilder(options.canvas_sel, 500, 240);
ctx.scale(1.5, 1.5); ctx.fillStyle = "#221"; ctx.strokeStyle = "#221";
ctx.font = "bold 8pt Arial";
var stave = new Vex.Flow.TabStave(10, 10, 450).
addTabGlyph().setContext(ctx).draw();
function newNote(tab_struct) { return new Vex.Flow.TabNote(tab_struct); }
function newBend(text) { return new Vex.Flow.Bend(text); }
var notes = [
newNote({
positions: [{str: 2, fret: 10}, {str: 4, fret: 9}], duration: "q" }).
addModifier(newBend("Full"), 0).
addModifier(newBend("1/2"), 1),
newNote({
positions: [{str: 2, fret: 5}, {str: 3, fret: 5}], duration: "q" }).
addModifier(newBend("1/4"), 0).
addModifier(newBend("1/4"), 1),
newNote({
positions: [{str: 4, fret: 7}], duration: "h" })
];
Vex.Flow.Formatter.FormatAndDraw(ctx, stave, notes);
ok(true, "Double Bends");
}
Vex.Flow.Test.Bend.doubleBendsWithRelease = function(options, contextBuilder) {
var ctx = new contextBuilder(options.canvas_sel, 500, 240);
ctx.scale(1.5, 1.5); ctx.fillStyle = "#221"; ctx.strokeStyle = "#221";
ctx.setFont("Arial", 8);
var stave = new Vex.Flow.TabStave(10, 10, 450).
addTabGlyph().setContext(ctx).draw();
function newNote(tab_struct) { return new Vex.Flow.TabNote(tab_struct); }
function newBend(text, release) { return new Vex.Flow.Bend(text, release); }
var notes = [
newNote({
positions: [{str: 1, fret: 10}, {str: 4, fret: 9}], duration: "q" }).
addModifier(newBend("1/2", true), 0).
addModifier(newBend("Full", true), 1),
newNote({
positions: [{str: 2, fret: 5},
{str: 3, fret: 5},
{str: 4, fret: 5}], duration: "q" }).
addModifier(newBend("1/4", true), 0).
addModifier(newBend("Monstrous", true), 1).
addModifier(newBend("1/4", true), 2),
newNote({
positions: [{str: 4, fret: 7}], duration: "q" }),
newNote({
positions: [{str: 4, fret: 7}], duration: "q" })
];
Vex.Flow.Formatter.FormatAndDraw(ctx, stave, notes);
ok(true, "Bend Release");
}
Vex.Flow.Test.Bend.reverseBends = function(options) {
Vex.Flow.Test.resizeCanvas(options.canvas_sel, 500, 240);
var ctx = Vex.getCanvasContext(options.canvas_sel);
ctx.scale(1.5, 1.5); ctx.fillStyle = "#221"; ctx.strokeStyle = "#221";
ctx.font = "bold 8pt Arial";
var stave = new Vex.Flow.TabStave(10, 10, 450).
addTabGlyph().setContext(ctx).draw();
function newNote(tab_struct) { return new Vex.Flow.TabNote(tab_struct); }
function newBend(text) { return new Vex.Flow.Bend(text); }
var notes = [
newNote({
positions: [{str: 2, fret: 10}, {str: 4, fret: 9}], duration: "w" }).
addModifier(newBend("Full"), 1).
addModifier(newBend("1/2"), 0),
newNote({
positions: [{str: 2, fret: 5}, {str: 3, fret: 5}], duration: "w" }).
addModifier(newBend("1/4"), 1).
addModifier(newBend("1/4"), 0),
newNote({
positions: [{str: 4, fret: 7}], duration: "w" })
];
for (var i = 0; i < notes.length; ++i) {
var note = notes[i];
var mc = new Vex.Flow.ModifierContext();
note.addToModifierContext(mc);
var tickContext = new Vex.Flow.TickContext();
tickContext.addTickable(note).preFormat().setX(50 * i).setPixelsUsed(95);
note.setStave(stave).setContext(ctx).draw();
ok(true, "Bend " + i);
}
}
|
/********************************************************************************
USB Host Audio v1.0 Class Driver Interface Definition
Company:
Microchip Technology Inc.
File Name:
usb_host_audio_v1_0.h
Summary:
USB Host Audio v1_0 Class Driver Interface Header
Description:
This header file contains the function prototypes and definitions of the
data types and constants that make up the interface to the USB Host Audio
v1.0 Class Driver.
*******************************************************************************/
//DOM-IGNORE-BEGIN
/*******************************************************************************
* Copyright (C) 2018 Microchip Technology Inc. and its subsidiaries.
*
* Subject to your compliance with these terms, you may use Microchip software
* and any derivatives exclusively with Microchip products. It is your
* responsibility to comply with third party license terms applicable to your
* use of third party software (including open source software) that may
* accompany Microchip software.
*
* THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
* EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
* WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
* INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
* WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
* BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
* FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
* ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
* THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
*******************************************************************************/
#ifndef _USB_HOST_AUDIO_V1_H_
#define _USB_HOST_AUDIO_V1_H_
//DOM-IGNORE-END
// *****************************************************************************
// *****************************************************************************
// Section: Included Files
// *****************************************************************************
// *****************************************************************************
#include "usb/usb_host.h"
#include "usb/usb_host_client_driver.h"
#include "usb/usb_audio_v1_0.h"
// DOM-IGNORE-BEGIN
#ifdef __cplusplus // Provide C++ Compatibility
extern "C" {
#endif
// DOM-IGNORE-END
// *****************************************************************************
// *****************************************************************************
// Section: Data Types and Constants
// *****************************************************************************
// *****************************************************************************
// *****************************************************************************
/* USB Host Audio v1.0 Object
Summary:
Defines the type of the Audio v1.0 Host client object.
Description:
This data type defines the type of the Audio Host client object. This type
is returned by the client driver attach event handler and is used by the
application to open the attached Audio v1.0 Device.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_OBJ;
// *****************************************************************************
/* USB Host Audio v1.0 Streaming interface Object
Summary:
Defines the type of the Audio v1.0 Host streaming interface object.
Description:
This data type defines the type of the Audio v1.0 Host streaming interface
    object. This type is returned by the USB_HOST_AUDIO_V1_StreamingInterfaceGetFirst
    and USB_HOST_AUDIO_V1_StreamingInterfaceGetNext functions.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ;
// *****************************************************************************
/* USB Host Audio v1.0 Streaming Interface Setting Object
Summary:
Defines the type of the Audio v1.0 Host streaming interface setting object.
Description:
This data type defines the type of the Audio v1.0 Host streaming interface
setting object. This type is returned by the
    USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetFirst and
    USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetNext functions.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ;
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Request Handle
Summary:
USB Host Audio v1.0 Client Driver request handle.
Description:
This handle is returned by the Audio v1.0 Host client driver entity control
functions and audio stream control request functions. Applications should use
this handle to track a request.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_REQUEST_HANDLE;
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Invalid Request Handle
Summary:
USB Host Audio v1.0 Client Driver invalid request handle.
Description:
This handle is returned by the Audio v1.0 Client driver command routines when the
request could not be scheduled.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID ((USB_HOST_AUDIO_V1_REQUEST_HANDLE)(-1))
// *****************************************************************************
/* USB Host Audio v1.0 Control Entity Object
Summary:
Defines the type of the Audio v1.0 Host control entity object.
Description:
This data type defines the type of the object returned by the
USB_HOST_AUDIO_V1_ControlEntityGetFirst or
    USB_HOST_AUDIO_V1_ControlEntityGetNext functions. The application uses
this object to get more information about that audio control entity.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ;
// *****************************************************************************
/* USB Host Audio stream handle
Summary:
Defines the type of the Audio v1.0 Host stream handle.
Description:
This data type defines the type of the handle returned by
USB_HOST_AUDIO_V1_StreamOpen function. The application uses this handle
to interact with an Audio Stream.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_STREAM_HANDLE;
// *****************************************************************************
/* USB Host Audio stream Invalid handle
Summary:
Defines Audio v1.0 Host stream invalid handle.
Description:
This handle is returned by the USB_HOST_AUDIO_V1_StreamOpen function when a stream
open has failed.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_STREAM_HANDLE_INVALID ((USB_HOST_AUDIO_V1_STREAM_HANDLE)(-1))
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Stream Data Transfer Handle
Summary:
USB Host Audio v1.0 Class Driver stream data transfer handle.
Description:
This handle is returned by the Audio v1.0 Class driver stream data transfer
functions and should be used by the application to track the transfer,
especially in cases where transfers are queued.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE;
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Invalid Stream Data Transfer Handle Definition
Summary:
USB Host Audio v1.0 Class Driver invalid stream data transfer handle.
Description:
This macro defines a USB Host Audio v1.0 Class Driver invalid stream
data transfer handle. An invalid transfer handle is returned by the Audio
v1.0 Class Driver stream data transfer routines when the request was not
successful.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE_INVALID ((USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE)(-1))
/*DOM-IGNORE-BEGIN*/
#define USB_HOST_AUDIO_V1_RESULT_MIN -100
/*DOM-IGNORE-END*/
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Result enumeration.
Summary:
USB Host Audio v1.0 Class Driver result enumeration.
Description:
This enumeration lists the possible USB Host Audio v1.0 Class Driver
operation results. These values are returned by Audio v1.0 Class Driver
functions.
Remarks:
None.
*/
typedef enum
{
/* An unknown failure has occurred */
USB_HOST_AUDIO_V1_RESULT_FAILURE /*DOM-IGNORE-BEGIN*/ = USB_HOST_AUDIO_V1_RESULT_MIN /*DOM-IGNORE-END*/,
/* The transfer or request could not be scheduled because internal queues
are full. The request or transfer should be retried */
USB_HOST_AUDIO_V1_RESULT_BUSY,
/* The request was stalled */
USB_HOST_AUDIO_V1_RESULT_REQUEST_STALLED,
/* A required parameter was invalid */
USB_HOST_AUDIO_V1_RESULT_INVALID_PARAMETER,
/* The associated device does not exist in the system. */
USB_HOST_AUDIO_V1_RESULT_DEVICE_UNKNOWN,
/* The specified handle is not valid */
USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID,
    /* The transfer or request was aborted */
USB_HOST_AUDIO_V1_RESULT_TRANSFER_ABORTED,
/* The specified Audio v1.0 object is invalid */
USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID,
    /* No more audio control entities are available */
USB_HOST_AUDIO_V1_RESULT_END_OF_CONTROL_ENTITY,
/* No more streaming interface settings present in the audio device */
USB_HOST_AUDIO_V1_RESULT_END_OF_STREAMING_INTERFACE,
/* No more interface alternate settings are present in the audio streaming
interface */
USB_HOST_AUDIO_V1_RESULT_END_OF_INTERFACE_SETTINGS,
/* Indicates that the operation succeeded or the request was accepted and
will be processed. */
USB_HOST_AUDIO_V1_RESULT_SUCCESS
/*DOM-IGNORE-BEGIN*/ = 1 /*DOM-IGNORE-END*/,
} USB_HOST_AUDIO_V1_RESULT;
// *****************************************************************************
/* USB Host Audio v1.0 Stream Event Handler Return Type
Summary:
    Defines the return type of the USB Host Audio v1.0 stream event handler.
Description:
This enumeration lists the possible return values of the USB Host Audio v1.0
stream event handler.
Remarks:
None.
*/
typedef enum
{
/* This means no response is required */
USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE_NONE /*DOM-IGNORE-BEGIN*/= 0 /*DOM-IGNORE-END*/
} USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE;
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Stream Direction
Summary:
USB Host Audio v1.0 Class Driver stream direction.
Description:
This enumeration lists the possible audio stream directions.
Remarks:
None.
*/
typedef enum
{
/* Stream Direction Host to Device */
USB_HOST_AUDIO_V1_DIRECTION_OUT /*DOM-IGNORE-BEGIN*/= 0 /*DOM-IGNORE-END*/,
/* Stream Direction Device to Host */
USB_HOST_AUDIO_V1_DIRECTION_IN /*DOM-IGNORE-BEGIN*/= 1 /*DOM-IGNORE-END*/,
} USB_HOST_AUDIO_V1_STREAM_DIRECTION;
// *****************************************************************************
/* Audio v1.0 Class Driver Events
Summary:
Identifies the possible events that the Audio v1.0 Class Driver attach event
handler can generate.
Description:
This enumeration identifies the possible events that the Audio v1.0 Class
Driver attach event handler can generate. The application should register an
event handler using the USB_HOST_AUDIO_V1_AttachEventHandlerSet function
to receive Audio v1.0 Class Driver Attach events.
*/
typedef enum
{
    /* This event occurs when the Host layer has attached the Audio v1.0 Class
       Driver instance to a USB Audio v1.0 Device. There is no event data
associated with this event. */
USB_HOST_AUDIO_V1_EVENT_ATTACH,
    /* This event occurs when the host layer has detached the Audio v1.0 Class
Driver instance from a USB Audio v1.0 Device. This can happen if the
device itself was detached or if the device configuration was changed.
There is no event data associated with this event. */
USB_HOST_AUDIO_V1_EVENT_DETACH,
} USB_HOST_AUDIO_V1_EVENT;
// *****************************************************************************
/* Audio v1.0 Stream Events
Summary:
Identifies the possible events that the Audio v1.0 Stream can generate.
Description:
This enumeration identifies the possible events that the Audio v1.0 Stream
can generate. The application should register an event handler using the
USB_HOST_AUDIO_V1_StreamEventHandlerSet function to receive Audio v1.0
stream events.
An event may have data associated with it. Events that are generated due
to a transfer of data between the host and device are accompanied by data
structures that provide the status of the transfer termination. For
example, the USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE event is
accompanied by a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE_DATA data structure. The
transferStatus member of this data structure indicates the success or
failure of the transfer. A transfer may fail due to the device not responding
on the bus, or if the device stalls any stages of the transfer. The event
description provides details on the nature of the event and the data that
is associated with the event.
*/
typedef enum
{
    /* This event occurs when an Audio v1.0 stream read operation has completed
(i.e., when the data has been received from the connected Audio v1.0
stream). This event is generated after the application calls the
USB_HOST_AUDIO_V1_StreamRead function. The eventData parameter in the
       event callback function will be a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE_DATA structure. This
contains details about the transfer handle associated with this read
request, the amount of data read and the termination status of the read
request. */
USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE,
/* This event occurs when an Audio v1.0 stream write operation has
completed (i.e., when the data has been written to the connected Audio v1.0
stream). This event is generated after the application calls the
USB_HOST_AUDIO_V1_StreamWrite function. The eventData parameter in the
       event callback function will be a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA structure. This
contains details about the transfer handle associated with this write
request, the amount of data written and the termination status of the
write request. */
USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE,
/* This event occurs when an audio streaming set interface request has been
completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_StreamingInterfaceSet function. The eventData
       parameter in the event callback function will be a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_INTERFACE_SET_COMPLETE_DATA. This contains
details about the request handle associated with the interface set
request and the termination status of the request.*/
USB_HOST_AUDIO_V1_STREAM_EVENT_INTERFACE_SET_COMPLETE,
/* This event occurs when an Audio v1.0 sampling frequency set request has
been completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_StreamSamplingFrequencySet function. The eventData
       parameter in the event callback function will be a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_SET_COMPLETE_DATA. This
contains details about the request handle associated with this sampling
frequency set request and the termination status of the request.*/
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_SET_COMPLETE,
/* This event occurs when an Audio v1.0 sampling frequency get request has
been completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_StreamSamplingFrequencyGet function. The eventData
       parameter in the event callback function will be a pointer to a
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_GET_COMPLETE_DATA. This
contains details about the request handle associated with this sampling
frequency get request and the termination status of the request.*/
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_GET_COMPLETE,
    /* This event occurs when an audio stream is detached from the Host. This
can happen if the Audio device itself was detached, or if the Audio
device configuration was changed. There is no event data associated with
this event. */
USB_HOST_AUDIO_V1_STREAM_EVENT_DETACH
} USB_HOST_AUDIO_V1_STREAM_EVENT;
// *****************************************************************************
/* USB Host Audio v1.0 Class Stream Data Transfer Event Data.
Summary:
USB Host Audio v1.0 class stream data transfer event data.
Description:
This data type defines the data structure returned by the Audio V1.0 stream
in conjunction with the following events:
    - USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE
    - USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE
Remarks:
None.
*/
typedef struct
{
/* Transfer handle of this transfer */
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE transferHandle;
/* Amount of data transferred */
size_t length;
/* Transfer termination status */
USB_HOST_AUDIO_V1_RESULT result;
}
USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE_DATA,
USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA;
// *****************************************************************************
/* USB Host Audio v1.0 Class Stream Control Event Data.
Summary:
USB Host Audio v1.0 class stream control event data.
Description:
This data type defines the data structure returned by the Audio V1.0 stream
in conjunction with the following events:
- USB_HOST_AUDIO_V1_STREAM_EVENT_INTERFACE_SET_COMPLETE
- USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_SET_COMPLETE
- USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_GET_COMPLETE
Remarks:
None.
*/
typedef struct
{
/* Transfer handle of this transfer */
USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;
/* Transfer termination status */
USB_HOST_AUDIO_V1_RESULT requestStatus;
}
USB_HOST_AUDIO_V1_STREAM_EVENT_INTERFACE_SET_COMPLETE_DATA,
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_SET_COMPLETE_DATA,
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_FREQUENCY_GET_COMPLETE_DATA;
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Attach Event Handler Function Pointer Type.
Summary:
USB Host Audio v1.0 Client Driver attach event handler function pointer type.
Description:
This data type defines the required function signature of the USB Host Audio
v1.0 Client Driver attach event handling callback function. The application
    must register a pointer to the Audio v1.0 Client Driver attach event handling
function whose function signature (parameter and return value types) match
the types specified by this function pointer to receive attach and
detach events callbacks from the Audio v1.0 Client Driver. The application
should use the USB_HOST_AUDIO_V1_AttachEventHandlerSet function to register an
attach event handler. The client driver will call this function with the
relevant event parameters. The descriptions of the event handler function
parameters are as follows:
- audioObj - Audio Device object to which this event is directed
- event - Event indicates if it is an Attach or Detach
- context - Value identifying the context of the application that was
registered with the event handling function
Remarks:
None.
*/
typedef void (* USB_HOST_AUDIO_V1_ATTACH_EVENT_HANDLER)
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_EVENT event,
uintptr_t context
);
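/* Example:
   An illustrative, application-side sketch of an attach event handler that
   matches this function pointer type. APP_USBHostAudioAttachHandler and
   appAudioObj are hypothetical application names, not part of this driver.

    static USB_HOST_AUDIO_V1_OBJ appAudioObj;

    void APP_USBHostAudioAttachHandler
    (
        USB_HOST_AUDIO_V1_OBJ audioObj,
        USB_HOST_AUDIO_V1_EVENT event,
        uintptr_t context
    )
    {
        switch (event)
        {
            case USB_HOST_AUDIO_V1_EVENT_ATTACH:
                // Remember the attached device object for later use.
                appAudioObj = audioObj;
                break;

            case USB_HOST_AUDIO_V1_EVENT_DETACH:
                // The device was removed; invalidate the saved object.
                appAudioObj = 0;
                break;

            default:
                break;
        }
    }
*/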
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Stream Event Handler Function Pointer Type.
Summary:
USB Host Audio v1.0 Class Driver stream event handler function pointer type.
Description:
This data type defines the required function signature of the USB Host Audio
v1.0 Class Driver Stream event handling callback function. The application
    must register a pointer to the Audio v1.0 Class Driver stream event handling
function whose function signature (parameter and return value types) match
the types specified by this function pointer to receive event callbacks from
the Audio v1.0 Class Driver. The application should use the
USB_HOST_AUDIO_V1_StreamEventHandlerSet function to register an audio
stream event handler. The class driver will call this function with the relevant
event parameters. The descriptions of the stream event handler function
parameters are as follows:
- handle - Handle to the Audio v1.0 stream
- event - Type of event generated
    - eventData - This parameter should be cast to an event-specific
pointer type based on the event that has occurred. Refer
to the USB_HOST_AUDIO_V1_STREAM_EVENT enumeration
description for more information.
- context - Value identifying the context of the application that
was registered with the event handling function
Remarks:
None.
*/
typedef USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE (* USB_HOST_AUDIO_V1_STREAM_EVENT_HANDLER )
(
USB_HOST_AUDIO_V1_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_STREAM_EVENT event,
void * eventData,
uintptr_t context
);
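/* Example:
   An illustrative, application-side sketch of a stream event handler that
   matches this function pointer type. APP_USBHostAudioStreamEventHandler and
   appStreamBusy are hypothetical application names (bool assumes
   <stdbool.h>), not part of this driver.

    static volatile bool appStreamBusy;

    USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE APP_USBHostAudioStreamEventHandler
    (
        USB_HOST_AUDIO_V1_STREAM_HANDLE handle,
        USB_HOST_AUDIO_V1_STREAM_EVENT event,
        void * eventData,
        uintptr_t context
    )
    {
        USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA * writeData;

        switch (event)
        {
            case USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE:
                // The event data is a pointer to the write complete data
                // structure; check the transfer termination status.
                writeData = (USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA *)eventData;
                if (writeData->result == USB_HOST_AUDIO_V1_RESULT_SUCCESS)
                {
                    appStreamBusy = false;
                }
                break;

            case USB_HOST_AUDIO_V1_STREAM_EVENT_DETACH:
                // The stream went away; stop queuing transfers.
                appStreamBusy = false;
                break;

            default:
                break;
        }

        return USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE_NONE;
    }
*/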
//*****************************************************************************
/* USB Host Audio v1.0 Class driver Control Transfer Complete Callback Function
Pointer type
Summary:
USB Host Audio v1.0 class driver control transfer complete callback function
pointer type.
Description:
This data type defines the required function signature of the USB Host Audio
v1.0 class driver control transfer complete callback function. The client
must provide a pointer to a control transfer complete callback function
whose function signature (parameter and return value types) must match the
types specified by this function pointer to receive notification
when a control transfer has completed. The application should use the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function to register an entity
control request callback. The Audio v1.0 client driver will call this
function with the relevant event parameters. The descriptions of the event
handler function parameters are as follows:
- audioObj - Audio v1.0 client driver object associated with this event
- requestHandle - Request handle of the control transfer request that caused
this event
- result - Completion result of the control transfer. This will be
USB_HOST_AUDIO_V1_RESULT_SUCCESS if the control transfer
completed successfully, USB_HOST_AUDIO_V1_RESULT_FAILURE if
an unknown failure occurred, or
USB_HOST_AUDIO_V1_RESULT_REQUEST_STALLED if the request was
stalled.
- size - Size of the data stage that was transferred
- context - Value identifying the context of the application that was
provided when the USB_HOST_AUDIO_V1_ControlRequest
function was called
Remarks:
None.
*/
typedef void (* USB_HOST_AUDIO_V1_ENTITY_REQUEST_CALLBACK)
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle,
USB_HOST_AUDIO_V1_RESULT result,
size_t size,
uintptr_t context
);
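/* Example:
   An illustrative, application-side sketch of an entity control request
   callback that matches this function pointer type.
   APP_USBHostAudioEntityRequestCallback and appControlRequestDone are
   hypothetical application names, not part of this driver.

    static volatile bool appControlRequestDone;

    void APP_USBHostAudioEntityRequestCallback
    (
        USB_HOST_AUDIO_V1_OBJ audioObj,
        USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle,
        USB_HOST_AUDIO_V1_RESULT result,
        size_t size,
        uintptr_t context
    )
    {
        // Flag completion; the application state machine can inspect result.
        appControlRequestDone = true;

        if (result == USB_HOST_AUDIO_V1_RESULT_REQUEST_STALLED)
        {
            // The device did not support this entity control request.
        }
    }
*/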
// ****************************************************************************
// ****************************************************************************
// Section: Client Access Functions
// ****************************************************************************
// ****************************************************************************
// ****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_AttachEventHandlerSet
(
USB_HOST_AUDIO_V1_ATTACH_EVENT_HANDLER eventHandler,
uintptr_t context
);
Summary:
Sets an attach/detach event handler.
Description:
This function will set an attach event handler. The attach event handler
    will be called when an Audio v1.0 Device has been attached or detached. The
context will be returned in the event handler. This function should be
called before the bus has been enabled.
Precondition:
None.
Parameters:
eventHandler - Pointer to the attach event handler.
context - An application defined context that will be returned in the event
handler.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - If the attach event handler was registered
successfully
- USB_HOST_AUDIO_V1_RESULT_FAILURE - If the number of registered event handlers
has exceeded USB_HOST_AUDIO_V1_ATTACH_LISTENERS_NUMBER
Remarks:
This function should be called before the USB_HOST_BusEnable function is called.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_AttachEventHandlerSet
(
USB_HOST_AUDIO_V1_ATTACH_EVENT_HANDLER eventHandler,
uintptr_t context
);
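/* Example:
   An illustrative sketch of registering the attach event handler during
   application initialization, before the host bus is enabled.
   APP_USBHostAudioAttachHandler and appData are hypothetical application
   names, not part of this driver.

    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_AttachEventHandlerSet(
        APP_USBHostAudioAttachHandler, (uintptr_t)&appData);

    if (result != USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // Too many listeners are already registered
        // (USB_HOST_AUDIO_V1_ATTACH_LISTENERS_NUMBER exceeded).
    }
*/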
// ****************************************************************************
/* Function:
USB_HOST_DEVICE_OBJ_HANDLE USB_HOST_AUDIO_V1_DeviceObjHandleGet
(
USB_HOST_AUDIO_V1_OBJ audioDeviceObj
);
Summary:
Returns the device object handle for this Audio v1.0 Device.
Description:
This function returns the device object handle for this Audio v1.0 Device.
This returned handle can be used by the application to perform device-level
operations, such as obtaining the string descriptors.
Precondition:
None.
Parameters:
audioDeviceObj - Audio V1.0 device object handle returned in the
USB_HOST_AUDIO_V1_ATTACH_EVENT_HANDLER function.
Returns:
Will return a valid device object handle if the device is still connected
to the system. Otherwise, the function will return USB_HOST_DEVICE_OBJ_HANDLE_INVALID.
Remarks:
None.
*/
USB_HOST_DEVICE_OBJ_HANDLE USB_HOST_AUDIO_V1_DeviceObjHandleGet
(
USB_HOST_AUDIO_V1_OBJ audioDeviceObj
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_EntityRequestCallbackSet
(
USB_HOST_AUDIO_V1_OBJ audioDeviceObj,
        USB_HOST_AUDIO_V1_ENTITY_REQUEST_CALLBACK appAudioEntityRequestCallback,
uintptr_t context
);
Summary:
Registers an audio entity request callback function with the Audio v1.0
Client Driver.
Description:
This function registers a callback function for the Audio v1.0 control
entity requests. The Audio v1.0 Host Client Driver will call this
callback function when an audio entity control request is completed.
Precondition:
None.
Parameters:
audioDeviceObj - Audio v1.0 device object.
appAudioEntityRequestCallback - A pointer to event handler function. If NULL,
events will not be generated.
context - Application specific context that is
returned in the event handler.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - The specified audio object does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_EntityRequestCallbackSet
(
USB_HOST_AUDIO_V1_OBJ audioDeviceObj,
USB_HOST_AUDIO_V1_ENTITY_REQUEST_CALLBACK appAudioEntityRequestCallback,
uintptr_t context
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ* streamingInterfaceObj
);
Summary:
Gets the first streaming interface object from the attached Audio Device.
Description:
This function will get the first streaming interface object from the
attached Audio Device.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio v1.0 client driver object.
streamingInterfaceObj - Pointer to an audio streaming interface object.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request completed successfully
- USB_HOST_AUDIO_V1_RESULT_END_OF_STREAMING_INTERFACE - No more streaming
interfaces are available
- USB_HOST_AUDIO_V1_RESULT_DEVICE_UNKNOWN - Device is not attached
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - Audio Device object is invalid
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An error has occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ* streamingInterfaceObj
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
        USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObjCurrent,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ* streamingInterfaceObjNext
);
Summary:
Gets the next streaming interface object from the attached Audio Device.
Description:
This function will get the next streaming interface object from the
attached Audio Device.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object.
streamingInterfaceObjCurrent - Current audio streaming interface object.
    streamingInterfaceObjNext - Pointer to the next audio streaming interface object.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request completed successfully
- USB_HOST_AUDIO_V1_RESULT_END_OF_STREAMING_INTERFACE - No more streaming
interfaces are available
- USB_HOST_AUDIO_V1_RESULT_DEVICE_UNKNOWN - Device is not attached
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - Audio Device object is invalid
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An error has occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObjCurrent,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ* streamingInterfaceObjNext
);
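/* Example:
   An illustrative sketch of enumerating all streaming interfaces of an
   attached device with the GetFirst/GetNext pair. audioObj is assumed to
   have been obtained from the attach event handler.

    USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingIfObj;
    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_StreamingInterfaceGetFirst(audioObj, &streamingIfObj);

    while (result == USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // Inspect streamingIfObj here (alternate settings, direction, format, ...).

        result = USB_HOST_AUDIO_V1_StreamingInterfaceGetNext(
            audioObj, streamingIfObj, &streamingIfObj);
    }

    // The loop ends when result is
    // USB_HOST_AUDIO_V1_RESULT_END_OF_STREAMING_INTERFACE (or an error code).
*/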
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ *interfaceSettingObj
);
Summary:
Gets the first streaming interface setting object within an audio streaming
interface.
Description:
This function gets the first streaming interface setting object within an
audio streaming interface.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio device object.
streamingInterfaceObj - Audio streaming interface object.
interfaceSettingObj - Pointer to the audio streaming interface setting object.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request completed successfully
- USB_HOST_AUDIO_V1_RESULT_END_OF_INTERFACE_SETTINGS - No more streaming
interface settings are available
- USB_HOST_AUDIO_V1_RESULT_DEVICE_UNKNOWN - Device is not attached
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - Audio Device object is invalid
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An error has occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ *interfaceSettingObj
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObjCurrent,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ *interfaceSettingObjNext
);
Summary:
Gets the next streaming interface setting object within an audio streaming
interface.
Description:
This function gets the next streaming interface setting object within an
audio streaming interface.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObjCurrent - Current audio streaming interface setting object
interfaceSettingObjNext - Pointer to the next audio streaming interface setting object
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request completed successfully
- USB_HOST_AUDIO_V1_RESULT_END_OF_INTERFACE_SETTINGS - No more streaming
interface settings are available
- USB_HOST_AUDIO_V1_RESULT_DEVICE_UNKNOWN - Device is not attached
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - Audio Device object is invalid
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An error has occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObjCurrent,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ *interfaceSettingObjNext
);
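/* Example:
   An illustrative sketch of enumerating the alternate settings of one
   streaming interface. audioObj and streamingIfObj are assumed to have been
   obtained as in the streaming interface example above.

    USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ settingObj;
    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetFirst(
        audioObj, streamingIfObj, &settingObj);

    while (result == USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // Query the attributes of this alternate setting here (channels,
        // bit resolution, sampling frequencies, direction, ...).

        result = USB_HOST_AUDIO_V1_StreamingInterfaceSettingGetNext(
            audioObj, streamingIfObj, settingObj, &settingObj);
    }

    // The loop ends with USB_HOST_AUDIO_V1_RESULT_END_OF_INTERFACE_SETTINGS.
*/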
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceTerminalLinkGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the terminal link of the specified streaming interface setting.
Description:
This function returns the terminal link of the specified streaming interface
setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
The terminal link of the audio streaming interface setting.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceTerminalLinkGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
    USB_AUDIO_V1_FORMAT_TAG USB_HOST_AUDIO_V1_StreamingInterfaceFormatTagGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the format tag of the specified streaming interface setting.
Description:
    This function returns the format tag of the specified streaming interface
setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
The format tag of the audio streaming interface setting.
Remarks:
None.
*/
USB_AUDIO_V1_FORMAT_TAG USB_HOST_AUDIO_V1_StreamingInterfaceFormatTagGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the number of channels of the specified streaming interface
setting.
Description:
This function returns the number of channels of the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
The number of channels present in the audio streaming interface setting.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceSubFrameSizeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the sub-frame size of the specified streaming interface
setting.
Description:
This function returns the sub-frame size of the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
The sub-frame size of the audio streaming interface setting.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceSubFrameSizeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceBitResolutionGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the bit resolution of the specified streaming interface
setting.
Description:
    This function returns the bit resolution of the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
    The bit resolution of the audio streaming interface setting.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceBitResolutionGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequencyTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the sampling frequency type of the specified streaming interface
setting.
Description:
This function returns the sampling frequency type of the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
The sampling frequency type of the audio streaming interface setting.
- 0 - Continuous Sampling frequency is supported
- 1 to 255 - The number of discrete sampling frequencies supported by the
audio streaming interface
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequencyTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
uint8_t* USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequenciesGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the sampling frequencies supported by the specified streaming interface
setting.
Description:
This function returns the sampling frequencies supported by the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
A pointer to the sampling frequencies supported by the audio streaming
interface setting.
Remarks:
None.
*/
uint8_t* USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequenciesGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
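/* Example:
   An illustrative sketch of reading the discrete sampling frequencies of a
   setting. It assumes the USB Audio v1.0 convention that each discrete
   frequency is encoded as a 3-byte little-endian value in the returned
   array. audioObj, streamingIfObj, and settingObj are assumed to have been
   obtained as in the previous examples.

    uint8_t nFrequencies;
    uint8_t * frequencyData;
    uint32_t frequencyHz;
    uint8_t i;

    nFrequencies = USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequencyTypeGet(
        audioObj, streamingIfObj, settingObj);

    frequencyData = USB_HOST_AUDIO_V1_StreamingInterfaceSamplingFrequenciesGet(
        audioObj, streamingIfObj, settingObj);

    if ((nFrequencies > 0) && (frequencyData != NULL))
    {
        for (i = 0; i < nFrequencies; i++)
        {
            // Assemble one 3-byte sampling frequency, least significant byte first.
            frequencyHz = (uint32_t)frequencyData[3 * i]
                        | ((uint32_t)frequencyData[(3 * i) + 1] << 8)
                        | ((uint32_t)frequencyData[(3 * i) + 2] << 16);

            // frequencyHz now holds one supported rate, for example 48000.
        }
    }
*/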
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_STREAM_DIRECTION USB_HOST_AUDIO_V1_StreamingInterfaceDirectionGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Returns the direction of the specified streaming interface setting.
Description:
This function returns the direction of the specified streaming
interface setting.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio Device object
streamingInterfaceObj - Audio streaming interface object
interfaceSettingObj - Audio streaming interface setting object
Returns:
- USB_HOST_AUDIO_V1_DIRECTION_OUT - Host to Device
- USB_HOST_AUDIO_V1_DIRECTION_IN - Device to Host
Remarks:
None.
*/
USB_HOST_AUDIO_V1_STREAM_DIRECTION USB_HOST_AUDIO_V1_StreamingInterfaceDirectionGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ streamingInterfaceObj,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_ControlEntityGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ * pEntityObject
);
Summary:
Retrieves the handle to the first audio control entity
Description:
This function retrieves the handle to the first audio control entity.
Precondition:
None.
Parameters:
audioObj - USB Host Audio v1.0 device object.
pEntityObject - pointer to the Audio control entity handle.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_END_OF_CONTROL_ENTITY - No more audio control
entities are available
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_ControlEntityGetFirst
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ * pEntityObject
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_ControlEntityGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObjectCurrent,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ * pEntityObject
);
Summary:
Retrieves the handle to the next audio control entity.
Description:
This function retrieves the handle to the next audio control entity.
Precondition:
None.
Parameters:
audioObj - USB Host Audio v1.0 device object.
entityObjectCurrent - Handle to current audio control entity.
pEntityObject - pointer to audio control entity handle.
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_END_OF_CONTROL_ENTITY - No more audio control
entities are available
- USB_HOST_AUDIO_V1_RESULT_OBJ_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_ControlEntityGetNext
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObjectCurrent,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ * pEntityObject
);
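// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how the audio
    control entities of an attached device could be walked using the get first
    and get next functions above. The audioObj variable is assumed to have been
    obtained from the attach event handler.
    <code>
    USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ currentEntity;
    USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ nextEntity;
    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_ControlEntityGetFirst(audioObj, &currentEntity);

    while (result == USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // Inspect currentEntity here, for example with the entity query
        // functions described later in this header.

        result = USB_HOST_AUDIO_V1_ControlEntityGetNext
                     (audioObj, currentEntity, &nextEntity);
        currentEntity = nextEntity;
    }
    </code>
*/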
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_EntityObjectGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
uint8_t entityId,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ* entityObj
);
Summary:
Retrieves the entity object for the entity ID.
Description:
This function retrieves the entity object for the entity ID.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityId - Entity ID
entityObj - Output parameter that will contain the audio control entity object
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_FAILURE - The entity Id could not be found or an
unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_EntityObjectGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
uint8_t entityId,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ* entityObj
);
// *****************************************************************************
/* Function:
USB_AUDIO_V1_ENTITY_TYPE USB_HOST_AUDIO_V1_EntityTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the entity type of the audio control entity.
Description:
This function returns the entity type of the audio control entity.
Prior to calling this function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
USB_AUDIO_V1_ENTITY_TYPE.
Remarks:
None.
*/
USB_AUDIO_V1_ENTITY_TYPE USB_HOST_AUDIO_V1_EntityTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_TerminalIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the terminal ID of the audio control entity.
Description:
This function returns the Terminal ID of the Audio Control entity. Prior to
calling this function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext,
or USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
The terminal ID of the audio control entity object.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_TerminalIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
USB_AUDIO_V1_TERMINAL_TYPE USB_HOST_AUDIO_V1_TerminalTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the terminal type of the audio control entity.
Description:
This function returns the terminal type of the audio control entity. Prior to
calling this function the entity object should be obtained by calling the
USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet function.
Parameters:
audioObj - USB Host Audio v1.0 device object
entityObject - Audio control entity Object
Returns:
The terminal type.
Remarks:
None.
*/
USB_AUDIO_V1_TERMINAL_TYPE USB_HOST_AUDIO_V1_TerminalTypeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
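// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how the terminal
    type of a control entity could be checked. The entityObject variable is
    assumed to have been obtained from the control entity get functions above.
    The terminal type code 0x0301 (Speaker) comes from the USB Audio Terminal
    Types specification and is used here as a raw value, not a named constant
    from this header.
    <code>
    USB_AUDIO_V1_TERMINAL_TYPE terminalType;

    terminalType = USB_HOST_AUDIO_V1_TerminalTypeGet(audioObj, entityObject);

    if (terminalType == 0x0301)
    {
        // This terminal represents a speaker output.
    }
    </code>
*/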
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_TerminalAssociationGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the associated terminal ID of the audio control terminal.
Description:
This function returns the ID of the associated terminal of the audio control
terminal. Prior to calling this function the entity object should be obtained by
calling USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext,
or USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
The ID of the associated terminal.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_TerminalAssociationGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_TerminalInputChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the number of logical output channels in the terminal's output audio
channel cluster.
Description:
This function returns the number of logical output channels in the terminal's
output audio channel cluster. This function is only applicable to an input
terminal. Prior to calling this function the entity object should be obtained by
calling USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext,
or USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 device object.
entityObject - Audio control entity object
Returns:
The number of logical output channels in the terminal's output audio channel
cluster.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_TerminalInputChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
USB_AUDIO_CHANNEL_CONFIG USB_HOST_AUDIO_V1_TerminalInputChannelConfigGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns a structure that describes the spatial location of the logical
channels in the terminal's output audio channel cluster.
Description:
This function returns a structure that describes the spatial location of
the logical channels in the terminal's output audio channel cluster.
This function is only applicable to an input terminal. Prior to calling this
function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext,
or USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 device object
entityObject - Audio control entity object
Returns:
The structure that describes the spatial location of the logical channels.
Remarks:
None.
*/
USB_AUDIO_CHANNEL_CONFIG USB_HOST_AUDIO_V1_TerminalInputChannelConfigGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_TerminalSourceIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the ID of the unit or terminal to which this terminal is connected.
Description:
This function returns the ID of the unit or terminal to which this terminal is
connected. This function is only applicable to an output terminal. Prior to
calling this function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst, USB_HOST_AUDIO_V1_ControlEntityGetNext,
or USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
The ID of the unit or terminal to which this terminal is connected.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_TerminalSourceIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_FeatureUnitIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns ID of the Feature Unit.
Description:
This function returns the ID of the feature unit. This function is
only applicable to a feature unit. Prior to calling this function the entity
object should be obtained by calling the
USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet function.
Parameters:
audioObj - USB Host Audio v1.0 device object.
entityObject - Audio control entity Object
Returns:
The ID of the feature unit.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_FeatureUnitIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_FeatureUnitSourceIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the ID of the unit or terminal to which this feature unit is connected.
Description:
This function returns the ID of the Unit or Terminal to which this feature unit
is connected. This function is only applicable to a feature unit. Prior to
calling this function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
The ID of the unit or terminal to which this feature unit is connected.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_FeatureUnitSourceIDGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_FeatureUnitChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
Summary:
Returns the number of channels.
Description:
This function returns the number of channels. This function is only applicable
to a feature unit. Prior to calling this function the entity object should be
obtained by calling USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
Returns:
The number of channels.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_FeatureUnitChannelNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject
);
// *****************************************************************************
/* Function:
bool USB_HOST_AUDIO_V1_FeatureUnitChannelMuteExists
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
uint8_t channel
);
Summary:
Returns "true" if mute control exists for the specified channel of the
feature unit.
Description:
This function returns "true" if mute control exists on the specified channel
of the feature unit. Channel 0 indicates Master mute control.
This function is only applicable to a feature unit. Prior to calling this
function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
channel - Channel number
Returns:
- true - Mute control exists on the specified channel
- false - Mute control does not exist on the specified channel
Remarks:
None.
*/
bool USB_HOST_AUDIO_V1_FeatureUnitChannelMuteExists
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
uint8_t channel
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelMuteSet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
bool *muteStatus
);
Summary:
Schedules a set mute control request to the specified channel.
Description:
This function schedules a set mute control request to the specified
channel. Prior to calling this function the user should check if mute control
exists on the specified channel by calling the
USB_HOST_AUDIO_V1_FeatureUnitChannelMuteExists function.
If the request was scheduled successfully, the requestHandle parameter will
contain a request handle that uniquely identifies this transfer. If the transfer
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel Number
muteStatus - Value of the mute control, where true (1) mutes the channel and false (0) unmutes it
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently busy.
Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred. requestHandle will
contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or requestHandle pointer
is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelMuteSet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
bool *muteStatus
);
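// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how channel 1 of
    a feature unit could be muted. The audioObj and entityObject variables are
    assumed to have been obtained as described earlier, and the entity request
    callback is assumed to have been registered with
    USB_HOST_AUDIO_V1_EntityRequestCallbackSet. The muteStatus variable must
    remain valid until the request-complete callback is received.
    <code>
    static bool muteStatus = true;
    USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;
    USB_HOST_AUDIO_V1_RESULT result;

    if (USB_HOST_AUDIO_V1_FeatureUnitChannelMuteExists(audioObj, entityObject, 1) == true)
    {
        result = USB_HOST_AUDIO_V1_FeatureUnitChannelMuteSet
                     (audioObj, entityObject, &requestHandle, 1, &muteStatus);

        if (result == USB_HOST_AUDIO_V1_RESULT_BUSY)
        {
            // The control request mechanism is busy; retry the request later.
        }
    }
    </code>
*/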
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelMuteGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
bool *muteStatus
);
Summary:
Schedules a get mute control request to the specified channel.
Description:
This function schedules a get mute control request to the specified
channel. Prior to calling this function the user should check if mute control
exists on the specified channel by calling the
USB_HOST_AUDIO_V1_FeatureUnitChannelMuteExists function.
If the request was scheduled successfully, the requestHandle parameter will contain a
request handle that uniquely identifies this request. If the transfer
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel number
muteStatus - Output parameter that will contain the current mute status when
the request is completed and a callback is received
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently
busy. Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred.
requestHandle will contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or
requestHandle pointer is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelMuteGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
bool *muteStatus
);
// *****************************************************************************
/* Function:
bool USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
uint8_t channel
);
Summary:
Returns "true" if volume control exists for the specified channel of the
feature unit.
Description:
This function returns "true" if volume control exists on the specified
channel of the feature unit. Channel 0 indicates master volume control.
This function is only applicable to a feature unit. Prior to calling this
function the entity object should be obtained by calling
USB_HOST_AUDIO_V1_ControlEntityGetFirst,
USB_HOST_AUDIO_V1_ControlEntityGetNext, or
USB_HOST_AUDIO_V1_EntityObjectGet.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
channel - Channel number
Returns:
- true - Volume control exists on the specified channel
- false - Volume control does not exist on the specified channel
Remarks:
None.
*/
bool USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
uint8_t channel
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *volume
);
Summary:
Schedules a set current volume control request to the specified channel.
Description:
This function schedules a set current volume request to the specified
channel. Prior to calling this function the user should check if volume
control exists on the specified channel by calling the
USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists function.
If the request was scheduled successfully, the requestHandle parameter will contain a
request handle that uniquely identifies this request. If the request
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel number to which the volume control is addressed
volume - Current volume control value that should be set in the Audio Device
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently
busy. Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred.
requestHandle will contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or
requestHandle pointer is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *volume
);
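// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how the current
    volume of the master channel (channel 0) of a feature unit could be set.
    The 16-bit value used here is only a placeholder; its meaning follows the
    USB Audio v1.0 volume control encoding. The volume variable must remain
    valid until the request-complete callback is received.
    <code>
    static uint16_t volume = 0x1000;
    USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;
    USB_HOST_AUDIO_V1_RESULT result;

    if (USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists(audioObj, entityObject, 0) == true)
    {
        result = USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSet
                     (audioObj, entityObject, &requestHandle, 0, &volume);

        if (result != USB_HOST_AUDIO_V1_RESULT_SUCCESS)
        {
            // The request was not scheduled; inspect the result code.
        }
    }
    </code>
*/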
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *volume
);
Summary:
Schedules a get current volume control request to the specified channel.
Description:
This function schedules a get current volume control request to the
specified channel. Prior to calling this function the user should check if
volume control exists on the specified channel by calling the
USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists function.
If the request was scheduled successfully, the requestHandle parameter will
contain a request handle that uniquely identifies this request. If the request
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel number to which the volume control is addressed
volume - Output parameter that will contain the current volume when a
request is completed and a callback is received
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently
busy. Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred.
requestHandle will contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or
requestHandle pointer is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *volume
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSubRangeNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *nSubRanges
);
Summary:
Schedules a control request to an Audio Device feature unit to get the number
of sub-ranges supported by the volume control on the specified channel.
Description:
This function schedules a control request to the Audio Device feature unit to
get the number of sub-ranges supported by the volume control on the specified
channel. Prior to calling this function the user should check if volume control
exists on the specified channel by calling the
USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeExists function.
If the request was scheduled successfully, the requestHandle parameter will contain a
request handle that uniquely identifies this request. If the request
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel number to which the volume control is addressed
nSubRanges - Output parameter that will contain the number of sub-ranges
when the request is completed and a callback is received
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently
busy. Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred.
requestHandle will contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or
requestHandle pointer is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSubRangeNumbersGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
uint16_t *nSubRanges
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeRangeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
void * data,
size_t size
);
Summary:
Schedules a control request to the Audio Device feature unit to get the range
supported by the volume control on the specified channel.
Description:
This function schedules a control request to the Audio Device feature unit to get
the range supported by the volume control on the specified channel.
Prior to calling this function the user should call the
USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSubRangeNumbersGet function
to know how many sub-ranges are supported.
Users should calculate the 'size' parameter of this function as follows:
<c>size = size of the number-of-ranges field + nSubRanges * (size of MIN + size of MAX + size of RES)</c>
If the request was scheduled successfully, the requestHandle parameter will contain a
request handle that uniquely identifies this request. If the request
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_REQUEST_HANDLE_INVALID.
When the control request completes, the Audio v1.0 Client Driver will call
the callback function that was set using the
USB_HOST_AUDIO_V1_EntityRequestCallbackSet function. The context
parameter specified here will be returned in the callback.
Parameters:
audioObj - USB Host Audio v1.0 Device object
entityObject - Audio control entity object
requestHandle - Output parameter that will contain the handle to this request
channelNumber - Channel number to which the volume control is addressed
data - Output buffer that will contain the volume range data when the
request is completed and a callback is received
size - Size, in bytes, of the data buffer (see the calculation above)
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The request was scheduled successfully.
requestHandle will contain a valid request handle.
- USB_HOST_AUDIO_V1_RESULT_BUSY - The control request mechanism is currently
busy. Retry the request.
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred.
requestHandle will contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_RESULT_PARAMETER_INVALID - The data pointer or
requestHandle pointer is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeRangeGet
(
USB_HOST_AUDIO_V1_OBJ audioObj,
USB_HOST_AUDIO_V1_CONTROL_ENTITY_OBJ entityObject,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
uint8_t channelNumber,
void * data,
size_t size
);
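// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how the volume
    range on channel 0 could be requested. The size calculation assumes 16-bit
    MIN/MAX/RES values and a 2-byte sub-range count field, and the nSubRanges
    value is assumed to have been returned by a previously completed
    USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeSubRangeNumbersGet request. The
    rangeData buffer must remain valid until the request-complete callback.
    <code>
    static uint8_t rangeData[64];
    uint16_t nSubRanges = 1;   // Assumed to be known from an earlier request.
    size_t size;
    USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;

    size = sizeof(uint16_t) + (nSubRanges * (3 * sizeof(uint16_t)));

    if (size <= sizeof(rangeData))
    {
        (void) USB_HOST_AUDIO_V1_FeatureUnitChannelVolumeRangeGet
                   (audioObj, entityObject, &requestHandle, 0, rangeData, size);
    }
    </code>
*/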
// ****************************************************************************
// ****************************************************************************
// Section: Audio Stream Access Functions
// ****************************************************************************
// ****************************************************************************
// ****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_STREAM_HANDLE USB_HOST_AUDIO_V1_StreamOpen
(
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ audiostreamingInterfaceObj
);
Summary:
Opens the specified audio stream.
Description:
This function will open the specified audio stream. Once opened, the audio
stream can be accessed via the handle that this function returns. The
audiostreamingInterfaceObj parameter is the value returned in the
USB_HOST_AUDIO_V1_StreamingInterfaceGetFirst or
USB_HOST_AUDIO_V1_StreamingInterfaceGetNext functions.
Precondition:
The audio streaming interface object should be valid.
Input:
audiostreamingInterfaceObj - Audio streaming interface object
Return:
Returns a valid handle if the audio stream could be opened
successfully. Otherwise, USB_HOST_AUDIO_V1_STREAM_HANDLE_INVALID is returned.
Remarks:
None.
*/
USB_HOST_AUDIO_V1_STREAM_HANDLE USB_HOST_AUDIO_V1_StreamOpen
(
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_OBJ audiostreamingInterfaceObj
);
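// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how an audio
    stream could be opened and later closed. The audiostreamingInterfaceObj
    variable is assumed to have been obtained from the streaming interface get
    functions described earlier in this header.
    <code>
    USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle;

    streamHandle = USB_HOST_AUDIO_V1_StreamOpen(audiostreamingInterfaceObj);

    if (streamHandle != USB_HOST_AUDIO_V1_STREAM_HANDLE_INVALID)
    {
        // The stream can now be configured and used for transfers.
        // ...
        // When the stream is no longer needed, close it.
        USB_HOST_AUDIO_V1_StreamClose(streamHandle);
    }
    </code>
*/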
// ****************************************************************************
/* Function:
void USB_HOST_AUDIO_V1_StreamClose
(
USB_HOST_AUDIO_V1_STREAM_HANDLE audioStreamHandle
);
Summary:
Closes the audio stream.
Description:
This function will close the open audio stream. This closes the association
between the application entity that opened the audio stream and the audio
stream. The audio stream handle becomes invalid.
Precondition:
None.
Parameters:
audioStreamHandle - Handle to the audio stream obtained from the
USB_HOST_AUDIO_V1_StreamOpen function.
Returns:
None.
Remarks:
The audio stream handle becomes invalid after calling this function.
*/
void USB_HOST_AUDIO_V1_StreamClose
(
USB_HOST_AUDIO_V1_STREAM_HANDLE audioStreamHandle
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamEventHandlerSet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_STREAM_EVENT_HANDLER appAudioHandler,
uintptr_t context
);
Summary:
Registers an event handler with the Audio v1.0 Client Driver stream.
Description:
This function registers a client specific Audio v1.0 stream event handler.
The Audio v1.0 Host Client Driver will call the appAudioHandler function
specified as the second argument with relevant event and associated event data
in response to audio stream data transfers that have been scheduled by the
client.
Precondition:
None.
Parameters:
handle - The handle to the Audio v1.0 stream
appAudioHandler - A pointer to the event handler function. If NULL, events
will not be generated.
context - The application specific context that is returned in the event handler
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamEventHandlerSet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_STREAM_EVENT_HANDLER appAudioHandler,
uintptr_t context
);
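// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how a stream
    event handler could be registered. The handler signature shown here is an
    assumption based on the USB_HOST_AUDIO_V1_STREAM_EVENT_HANDLER type declared
    elsewhere in this header; refer to that typedef for the authoritative
    prototype. The appData structure is a hypothetical application object used
    only as the callback context.
    <code>
    USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE APP_AudioStreamEventHandler
    (
        USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
        USB_HOST_AUDIO_V1_STREAM_EVENT event,
        void * eventData,
        uintptr_t context
    )
    {
        // Handle read, write, and interface-set completion events here.
        return USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE_NONE;
    }

    // Registration, typically done right after the stream is opened:
    (void) USB_HOST_AUDIO_V1_StreamEventHandlerSet
               (streamHandle, APP_AudioStreamEventHandler, (uintptr_t)&appData);
    </code>
*/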
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
Summary:
Schedules a SET_INTERFACE request to the specified audio stream.
Description:
This function schedules a SET_INTERFACE request for the specified
audio stream, selecting the supplied streaming interface setting. An audio
stream must be enabled before scheduling any data
transfers with the stream. A USB_HOST_AUDIO_V1_STREAM_EVENT_ENABLE_COMPLETE
event is generated when this request is completed.
USB_HOST_AUDIO_V1_STREAM_EVENT_ENABLE_COMPLETE_DATA returns the status and
request handle of the request.
Precondition:
The audio stream should have been opened. Only one audio stream from an audio
stream group can be enabled at a time.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
requestHandle - Output parameter that will contain the handle to this request
interfaceSettingObj - Audio streaming interface setting object to be selected
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamingInterfaceSet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE * requestHandle,
USB_HOST_AUDIO_V1_STREAMING_INTERFACE_SETTING_OBJ interfaceSettingObj
);
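// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how an alternate
    setting could be selected on the streaming interface associated with an open
    stream. The streamHandle and interfaceSettingObj variables are assumed to
    have been obtained from USB_HOST_AUDIO_V1_StreamOpen and from the streaming
    interface setting get functions described earlier in this header.
    <code>
    USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;
    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_StreamingInterfaceSet
                 (streamHandle, &requestHandle, interfaceSettingObj);

    if (result == USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // Completion is reported through the registered stream event handler.
    }
    </code>
*/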
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamSamplingFrequencySet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE *requestHandle,
const uint32_t *samplingFrequency
)
Summary:
Schedules an audio stream set sampling rate request for the specified
audio stream.
Description:
This function schedules an audio stream set sampling rate request for the
specified audio stream. A USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE
event is generated when this request is completed.
USB_HOST_AUDIO_V1_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
requestHandle - Output parameter that will contain the handle to this request
samplingFrequency - Pointer to the sampling frequency to be set
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamSamplingFrequencySet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE *requestHandle,
const uint32_t *samplingFrequency
);
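// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how a 48 kHz
    sampling frequency could be requested on an open stream. The streamHandle
    variable is assumed to have been obtained from USB_HOST_AUDIO_V1_StreamOpen,
    and the samplingFrequency variable must remain valid until the completion
    event is delivered to the stream event handler.
    <code>
    static const uint32_t samplingFrequency = 48000;
    USB_HOST_AUDIO_V1_REQUEST_HANDLE requestHandle;

    (void) USB_HOST_AUDIO_V1_StreamSamplingFrequencySet
               (streamHandle, &requestHandle, &samplingFrequency);
    </code>
*/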
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamSamplingFrequencyGet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE *requestHandle,
uint32_t *samplingFrequency
)
Summary:
Schedules an audio stream get sampling rate request for the specified
audio stream.
Description:
This function schedules an audio stream get sampling rate request for the
specified audio stream. A stream event is generated when this request is
completed; the associated event data returns the status and request handle
of the request.
Precondition:
The audio stream should have been opened.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
requestHandle - Output parameter that will contain the handle to this request
samplingFrequency - Output parameter that will contain the sampling frequency
when the request is completed
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamSamplingFrequencyGet
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_REQUEST_HANDLE *requestHandle,
uint32_t *samplingFrequency
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamWrite
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
Summary:
Schedules an audio stream write request for the specified audio stream.
Description:
This function schedules an audio stream write request for the specified
audio stream. A USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE event is
generated when this request is completed.
USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened and enabled. The direction of the
audio stream should be USB_HOST_AUDIO_V1_DIRECTION_OUT.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
transferHandle - Handle to the stream write transfer request
source - Pointer to the buffer containing data to be written to the
device
length - Amount of data to write (in bytes)
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamWrite
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
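// *****************************************************************************
/* Example:
    An illustrative sketch (not part of the driver API) showing how one write
    could be scheduled on an open, enabled OUT stream. The audioFrame buffer is
    a placeholder application buffer; it must remain valid until the
    write-complete event is delivered to the stream event handler.
    <code>
    static uint8_t audioFrame[192];
    USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE transferHandle;
    USB_HOST_AUDIO_V1_RESULT result;

    result = USB_HOST_AUDIO_V1_StreamWrite
                 (streamHandle, &transferHandle, audioFrame, sizeof(audioFrame));

    if (result != USB_HOST_AUDIO_V1_RESULT_SUCCESS)
    {
        // The request was not scheduled; inspect the returned result code.
    }
    </code>
*/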
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamRead
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
Summary:
Schedules an audio stream read request for the specified audio stream.
Description:
This function schedules an audio stream read request for the specified
audio stream. A USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE event is generated
when this request is completed.
USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened and enabled. The direction of the
audio stream should be USB_HOST_AUDIO_V1_DIRECTION_IN.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
transferHandle - Handle to the stream read transfer request
source - Pointer to the buffer where the data read from the device
will be stored
length - Amount of data to read (in bytes)
Returns:
- USB_HOST_AUDIO_V1_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_StreamRead
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
// *****************************************************************************
// Section: Global Data Types. This section is specific to PIC32 implementation
// of the USB Host Audio V1 Client Driver
// *****************************************************************************
// *****************************************************************************
// *****************************************************************************
/* USB HOST Audio V1 Client Driver Interface
Summary:
USB HOST Audio v1.0 Client Driver interface.
Description:
This macro should be used by the application in the TPL table while adding
support for the USB Audio v1.0 Host Client Driver.
Remarks:
None.
*/
/*DOM-IGNORE-BEGIN*/extern const USB_HOST_CLIENT_DRIVER gUSBHostAudioV1Driver; /*DOM-IGNORE-END*/
#define USB_HOST_AUDIO_V1_INTERFACE /*DOM-IGNORE-BEGIN*/&gUSBHostAudioV1Driver /*DOM-IGNORE-END*/
// DOM-IGNORE-BEGIN
#ifdef __cplusplus // Provide C++ Compatibility
extern "C" {
#endif
// DOM-IGNORE-END
// *****************************************************************************
// *****************************************************************************
// Section: Deprecated API - Not recommended for new applications
// *****************************************************************************
// *****************************************************************************
// *****************************************************************************
/* USB Host Audio v1.0 Object
Summary:
Defines the type of the Audio v1.0 Host client object.
Description:
This type defines the type of the Audio Host client object. This type
is returned by the attach event handler and is used by the application to
open the attached Audio v1.0 Device.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_OBJ USB_HOST_AUDIO_V1_OBJ
// *****************************************************************************
/* USB Host Audio v1.0 Stream Object
Summary:
Defines the type of the Audio v1.0 Host stream object.
Description:
This type defines the type of the Audio v1.0 Host stream object. This type
is returned by USB_HOST_AUDIO_V1_0_StreamGetFirst and USB_HOST_AUDIO_V1_0_StreamGetNext
as part of the USB_HOST_AUDIO_V1_0_STREAM_INFO structure.
Remarks:
None.
*/
typedef uintptr_t USB_HOST_AUDIO_V1_0_STREAM_OBJ;
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Request Handle
Summary:
USB Host Audio v1.0 Client Driver request handle.
Description:
This is returned by the Audio v1.0 Client Driver command routines and should
be used by the application to track the command especially in cases where
transfers are queued.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_REQUEST_HANDLE USB_HOST_AUDIO_V1_REQUEST_HANDLE
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Invalid Request Handle
Summary:
USB Host Audio v1.0 Client Driver invalid request handle.
Description:
This is returned by the Audio v1.0 Client Driver command routines when the request
could not be scheduled.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID ((USB_HOST_AUDIO_V1_0_REQUEST_HANDLE)(-1))
// *****************************************************************************
/* USB HOST Audio Client Driver Interface
Summary:
USB HOST Audio Client Driver interface.
Description:
This macro should be used by the application in the TPL table while adding
support for the USB Audio Host Client Driver.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_INTERFACE (void*)USB_HOST_AUDIO_V1_INTERFACE
// *****************************************************************************
/* USB Host Audio stream handle
Summary:
Defines the type of the Audio v1.0 Host stream handle.
Description:
This type defines the type of the handle returned by the
USB_HOST_AUDIO_V1_0_StreamOpen function. The application uses this
handle to interact with an audio stream.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_HANDLE USB_HOST_AUDIO_V1_STREAM_HANDLE
// *****************************************************************************
/* USB Host Audio stream Invalid handle
Summary:
Defines the type of the Audio v1.0 Host stream invalid handle.
Description:
This is returned by the USB_HOST_AUDIO_V1_0_StreamOpen function when
a stream open request has failed.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_HANDLE_INVALID USB_HOST_AUDIO_V1_STREAM_HANDLE_INVALID
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Transfer Handle
Summary:
USB Host Audio v1.0 Class Driver transfer handle.
Description:
This is returned by the Audio v1.0 Class Driver command and data transfer
routines and should be used by the application to track the transfer
especially in cases where transfers are queued.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Invalid Transfer Handle Definition
Summary:
USB Host Audio v1.0 Class Driver invalid transfer handle definition.
Description:
This macro defines a USB Host Audio v1.0 Class Driver invalid transfer
handle. An invalid transfer handle is returned by the Audio v1.0 Class Driver
data and command transfer routines when the request was not successful.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE_INVALID USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE_INVALID
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Result enumeration.
Summary:
USB Host Audio v1.0 Class Driver audio result enumeration.
Description:
This enumeration lists the possible USB Host Audio v1.0 Class Driver operation
results. These values are returned by Audio v1.0 Class Driver functions.
Remarks:
None.
*/
typedef enum
{
/* The transfer or request could not be scheduled because internal
* queues are full. The request or transfer should be retried */
USB_HOST_AUDIO_V1_0_RESULT_BUSY = /* DOM-IGNORE-BEGIN */ USB_HOST_RESULT_REQUEST_BUSY, /* DOM-IGNORE-END*/
/* The transfer or requested was aborted */
USB_HOST_AUDIO_V1_0_RESULT_TRANSFER_ABORTED,
/* The request was stalled */
USB_HOST_AUDIO_V1_0_RESULT_REQUEST_STALLED,
/* The specified Audio v1.0 Object is Invalid */
USB_HOST_AUDIO_V1_0_RESULT_OBJ_INVALID,
/* No more audio stream present in the Device */
USB_HOST_AUDIO_V1_0_RESULT_END_OF_STREAM_LIST,
/* DOM-IGNORE-BEGIN */
USB_HOST_AUDIO_V1_0_RESULT_ERROR_INTERFACE_UNKNOWN,
/* DOM-IGNORE-END*/
/* A required parameter was invalid */
USB_HOST_AUDIO_V1_0_RESULT_PARAMETER_INVALID,
/* DOM-IGNORE-BEGIN */
USB_HOST_AUDIO_V1_0_RESULT_CONFIGURATION_UNKNOWN,
USB_HOST_AUDIO_V1_0_RESULT_BUS_NOT_ENABLED,
USB_HOST_AUDIO_V1_0_RESULT_BUS_UNKNOWN,
/* DOM-IGNORE-END*/
/* The specified device does not exist in the system */
USB_HOST_AUDIO_V1_0_RESULT_DEVICE_UNKNOWN,
/* An unknown failure has occurred */
USB_HOST_AUDIO_V1_0_RESULT_FAILURE,
/* Indicates a false condition */
USB_HOST_AUDIO_V1_0_RESULT_FALSE = 0,
/* Indicate a true condition */
USB_HOST_AUDIO_V1_0_RESULT_TRUE = 1,
/* Indicates that the operation succeeded or the request was accepted and
will be processed. */
USB_HOST_AUDIO_V1_0_RESULT_SUCCESS = USB_HOST_RESULT_TRUE
}
USB_HOST_AUDIO_V1_0_RESULT;
// *****************************************************************************
/* USB Host Audio v1.0 Stream Result enumeration.
Summary:
USB Host Audio v1.0 stream result enumeration.
Description:
This enumeration lists the possible USB Host Audio v1.0 stream operation
results. These values are returned by Audio v1.0 stream functions.
Remarks:
None.
*/
typedef enum
{
/* The transfer or request could not be scheduled because internal
* queues are full. The request or transfer should be retried */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_REQUEST_BUSY = USB_HOST_RESULT_REQUEST_BUSY,
/* Request was aborted */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_TRANSFER_ABORTED,
/* Request was stalled */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_REQUEST_STALLED,
/* The specified Stream Handle is not valid */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID,
/* The end of the device list was reached.*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT_END_OF_DEVICE_LIST,
/* The specified interface is not available */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_INTERFACE_UNKNOWN,
/* A NULL parameter was passed to the function */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_PARAMETER_INVALID,
/* The specified configuration does not exist on this device.*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT_CONFIGURATION_UNKNOWN,
/* A bus operation was requested but the bus was not operated */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_BUS_NOT_ENABLED,
/* The specified bus does not exist in the system */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_BUS_UNKNOWN,
/* The specified audio stream does not exist in the system */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_UNKNOWN,
/* An unknown failure has occurred */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE,
/* Indicates a false condition */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_FALSE = 0,
/* Indicate a true condition */
USB_HOST_AUDIO_V1_0_STREAM_RESULT_TRUE = 1,
/* Indicates that the operation succeeded or the request was accepted and
will be processed. */
USB_HOST_AUDIO_V1_0_STREAM_SUCCESS = USB_HOST_RESULT_TRUE
}USB_HOST_AUDIO_V1_0_STREAM_RESULT;
// *****************************************************************************
/* USB Host Audio v1.0 Event Handler Return Type
Summary:
The return type of the USB Audio v1.0 Host Client Driver event handler.
Description:
This enumeration lists the possible return values of the USB Audio v1.0 Host
Client Driver event handler.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_EVENT_RESPONSE USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE
// *****************************************************************************
/* USB Host Audio v1.0 Stream Event Handler Return Type
Summary:
The return type of the USB Host Audio v1.0 stream event handler.
Description:
This enumeration lists the possible return values of the USB Host Audio v1.0
stream event handler.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_EVENT_RESPONSE_NONE USB_HOST_AUDIO_V1_STREAM_EVENT_RESPONSE_NONE
// *****************************************************************************
/* Audio v1.0 Class Driver Events
Summary:
Identifies the possible events that the Audio v1.0 Class Driver can generate.
Description:
This enumeration identifies the possible events that the Audio v1.0 Class Driver
can generate. The application should register an event handler using the
USB_HOST_AUDIO_V1_0_AttachEventHandlerSet function to receive Audio v1.0 Class
Driver events.
*/
#define USB_HOST_AUDIO_V1_0_EVENT USB_HOST_AUDIO_V1_EVENT
#define USB_HOST_AUDIO_V1_0_EVENT_ATTACH USB_HOST_AUDIO_V1_EVENT_ATTACH
#define USB_HOST_AUDIO_V1_0_EVENT_DETACH USB_HOST_AUDIO_V1_EVENT_DETACH
// *****************************************************************************
/* Audio v1.0 Stream Events
Summary:
Identifies the possible events that the Audio v1.0 stream can generate.
Description:
This enumeration identifies the possible events that the Audio v1.0 stream
can generate. The application should register an event handler using the
USB_HOST_AUDIO_V1_0_StreamEventHandlerSet function to receive Audio v1.0
stream events.
An event may have data associated with it. Events that are generated due to
a transfer of data between the Host and Device are accompanied by data
structures that provide the status of the transfer termination. For example,
the USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE event is accompanied by a
pointer to a USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE_DATA data
structure. The transferStatus member of this data structure indicates the
success or failure of the transfer. A transfer may fail due to the Device not
responding on the bus if the Device stalls any stages of the transfer or
due to NAK time-outs. The event description provides details on the nature of
the event and the data that is associated with the event.
*/
typedef enum
{
/* This event occurs when an Audio v1.0 stream read operation has completed
(i.e., when the data has been received from the connected Audio v1.0 stream).
This event is generated after the application calls the
USB_HOST_AUDIO_V1_0_StreamRead function. The eventData parameter in the
event callback function will be of a pointer to a
USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE_DATA structure. This
contains details about the transfer handle associated with this read
request, the amount of data read and the termination status of the read
request. */
USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE,
/* This event occurs when an Audio v1.0 stream write operation has completed
(i.e., when the data has been written to the connected Audio v1.0 stream).
This event is generated after the application calls the
USB_HOST_AUDIO_V1_0_StreamWrite function. The eventData parameter in the
event callback function will be of a pointer to a
USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE_DATA structure. This
contains details about the transfer handle associated with this write
request, the amount of data written and the termination status of the
write request. */
USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE,
/* This event occurs when an Audio v1.0 stream enable request has been
completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_0_StreamEnable function. The eventData parameter in the
event callback function will be of a pointer to a
USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE_DATA. This contains details
about the request handle associated with this stream enable request and the
termination status of the Stream Enable request.*/
USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE,
/*This event occurs when an Audio v1.0 stream disable request has been
completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_0_StreamDisable function. The eventData parameter in the
event callback function will be of a pointer to a
USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE_DATA. This contains details
about the request handle associated with this stream disable request and the
termination status of the Stream Disable request.*/
USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE,
/*This event occurs when an Audio v1.0 sampling rate set request has been
completed. This event is generated after the application calls the
USB_HOST_AUDIO_V1_0_StreamSamplingRateSet function. The eventData
parameter in the event callback function will be of a pointer to a
USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE_DATA. This
contains details about the request handle associated with this Sampling
Rate Set request and the termination status of the request.*/
USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE,
}
USB_HOST_AUDIO_V1_0_STREAM_EVENT;
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Stream Direction
Summary:
USB Host Audio v1.0 Class Driver stream direction.
Description:
This macro defines the stream direction of the USB Host Audio v1.0 Class Driver.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_DIRECTION USB_HOST_AUDIO_V1_STREAM_DIRECTION
#define USB_HOST_AUDIO_V1_0_DIRECTION_OUT USB_HOST_AUDIO_V1_DIRECTION_OUT
#define USB_HOST_AUDIO_V1_0_DIRECTION_IN USB_HOST_AUDIO_V1_DIRECTION_IN
// *****************************************************************************
/* USB Host Audio stream Info table structure
Summary:
This structure defines USB Host audio stream information structure.
Description:
This structure is an out parameter to the functions
USB_HOST_AUDIO_V1_0_StreamGetFirst and USB_HOST_AUDIO_V1_0_StreamGetNext
functions. This structure contains information about an audio stream in the
attached Audio Device. This structure contains the stream object, audio format, etc.
Remarks:
None.
*/
#if defined (USB_HOST_AUDIO_V1_0_SAMPLING_FREQUENCIES_NUMBER) && !defined (USB_HOST_AUDIO_V1_SAMPLING_FREQUENCIES_NUMBER)
#define USB_HOST_AUDIO_V1_SAMPLING_FREQUENCIES_NUMBER USB_HOST_AUDIO_V1_0_SAMPLING_FREQUENCIES_NUMBER
#endif
typedef struct
{
/* Audio Stream Object. Clients need to pass this object when opening this
audio stream using USB_HOST_AUDIO_V1_0_StreamOpen function. */
USB_HOST_AUDIO_V1_0_STREAM_OBJ streamObj;
/* Audio Format code for this Stream */
USB_AUDIO_FORMAT_CODE format;
/* Stream direction */
USB_HOST_AUDIO_V1_0_STREAM_DIRECTION streamDirection;
/* Number of physical channels in the audio stream */
uint8_t nChannels;
/* Number of bytes occupied by one audio sub-frame */
uint8_t subFrameSize;
/* Number of effectively used bits from the available bits in an audio sub-frame */
uint8_t bitResolution;
/* Indicates how the sampling frequency can be programmed:
0: Continuous sampling frequency
1..255: Number of discrete sampling frequencies supported by Audio stream
*/
uint8_t nSamplingRates;
/* Supported sampling Frequencies */
uint32_t tSamFreq[USB_HOST_AUDIO_V1_SAMPLING_FREQUENCIES_NUMBER];
} USB_HOST_AUDIO_V1_0_STREAM_INFO;
// *****************************************************************************
/* USB Host Audio v1.0 Class Stream Transfer Event Data.
Summary:
USB Host Audio v1.0 class stream transfer event data.
Description:
This data type defines the data structure returned by the Audio V1.0 Client
Driver in conjunction with the following events:
- USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE
- USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE_DATA USB_HOST_AUDIO_V1_STREAM_EVENT_WRITE_COMPLETE_DATA
#define USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE_DATA USB_HOST_AUDIO_V1_STREAM_EVENT_READ_COMPLETE_DATA
// *****************************************************************************
/* USB Host Audio v1.0 Class Stream Control Event Data.
Summary:
USB Host Audio v1.0 class stream control event data.
Description:
This data type defines the data structure returned by the Audio V1.0 Client
Driver in conjunction with the following events:
- USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE
- USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE
Remarks:
None.
*/
typedef struct
{
/* Transfer handle of this transfer */
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE requestHandle;
/* Transfer termination status */
USB_HOST_AUDIO_V1_0_RESULT requestStatus;
}
USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE_DATA,
USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE_DATA;
// *****************************************************************************
/* USB Host Audio v1.0 Client Driver Attach Event Handler Function Pointer Type.
Summary:
USB Host Audio v1.0 Client Driver attach event handler function pointer type.
Description:
This data type defines the required function signature of the USB Host Audio v1.0
Client Driver attach event handling callback function. The application must
register a pointer to an Audio v1.0 Client Driver attach event handling function
whose function signature (parameter and return value types) match the types
specified by this function pointer in order to receive attach and detach event
callbacks from the Audio v1.0 Client Driver. The client driver will invoke this
function with event relevant parameters. The descriptions of the event
handler function parameters are as follows:
- audioObj - Handle of the client to which this event is directed
- event - Event indicates if it is an attach or detach
- context - Value identifying the context of the application that was
registered with the event handling function
Remarks:
None.
*/
typedef void (* USB_HOST_AUDIO_V1_0_ATTACH_EVENT_HANDLER)
(
USB_HOST_AUDIO_V1_0_OBJ audioObj,
USB_HOST_AUDIO_V1_0_EVENT event,
uintptr_t context
);
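// *****************************************************************************
/* Example: Attach event handler implementation (illustrative sketch).
   This block is not part of the original header. It only shows one way an
   application could implement a function matching the
   USB_HOST_AUDIO_V1_0_ATTACH_EVENT_HANDLER signature above. The APP_DATA
   structure, its members, and the handler name are hypothetical; the specific
   attach/detach members of USB_HOST_AUDIO_V1_0_EVENT are not shown in this
   header and would be checked in a real application.

    void APP_USBHostAudioAttachHandler
    (
        USB_HOST_AUDIO_V1_0_OBJ audioObj,
        USB_HOST_AUDIO_V1_0_EVENT event,
        uintptr_t context
    )
    {
        APP_DATA * appData = (APP_DATA *)context;

        // Remember the device object so the rest of the application can
        // enumerate and open its streams later.
        appData->audioObj = audioObj;
        appData->lastAudioEvent = event;
    }
*/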
// *****************************************************************************
/* USB Host Audio v1.0 Class Driver Stream Event Handler Function Pointer Type.
Summary:
USB Host Audio v1.0 Class Driver stream event handler function pointer type.
Description:
This data type defines the required function signature of the USB Host
Audio v1.0 Class Driver stream event handling callback function. The
application must register a pointer to an Audio v1.0 Class Driver stream
events handling function whose function signature (parameter and return
value types) match the types specified by this function pointer
to receive event callbacks from the Audio v1.0 Class Driver. The class
driver will call this function with relevant event parameters. The
descriptions of the event handler function parameters are as follows:
- handle - Handle to the Audio v1.0 stream
- event - Type of event generated
- eventData - This parameter should be cast to an event-specific
pointer type based on the event that has occurred. Refer
to the USB_HOST_AUDIO_V1_0_STREAM_EVENT enumeration
description for more information.
- context - Value identifying the context of the application that
was registered with the event handling function
Remarks:
None.
*/
typedef USB_HOST_AUDIO_V1_0_STREAM_EVENT_RESPONSE (* USB_HOST_AUDIO_V1_0_STREAM_EVENT_HANDLER )
( USB_HOST_AUDIO_V1_0_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_0_STREAM_EVENT event,
void * eventData,
uintptr_t context );
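// *****************************************************************************
/* Example: Stream event handler implementation (illustrative sketch).
   This block is not part of the original header. It shows a handler matching
   the USB_HOST_AUDIO_V1_0_STREAM_EVENT_HANDLER signature above, reacting to
   the stream events defined earlier in this file. The APP_DATA structure and
   its members are hypothetical, and the name of the "no response" return
   value is an assumption (it is not shown in this header).

    USB_HOST_AUDIO_V1_0_STREAM_EVENT_RESPONSE APP_USBHostAudioStreamEventHandler
    (
        USB_HOST_AUDIO_V1_0_STREAM_HANDLE handle,
        USB_HOST_AUDIO_V1_0_STREAM_EVENT event,
        void * eventData,
        uintptr_t context
    )
    {
        APP_DATA * appData = (APP_DATA *)context;

        switch (event)
        {
            case USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE:
                appData->streamIsEnabled = true;
                break;
            case USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE:
                appData->samplingRateIsSet = true;
                break;
            case USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE:
                // eventData points to a
                // USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE_DATA structure.
                appData->writeIsComplete = true;
                break;
            default:
                break;
        }

        // Assumed name for the "no action required" response value.
        return USB_HOST_AUDIO_V1_0_STREAM_EVENT_RESPONSE_NONE;
    }
*/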
// *****************************************************************************
/* USB Host Audio v1.0 Class driver Control Transfer Complete Callback Function
Pointer type
Summary:
USB Host Audio v1.0 Class Driver control transfer complete callback function
pointer type.
Description:
This data type defines the required function signature of the USB Host
Audio v1.0 Class Driver control transfer complete callback function. The
client must provide a pointer to a control transfer complete callback
function whose function signature (parameter and return value types) must
match the types specified by this function pointer to receive
notification when a control transfer has completed. The pointer to the
callback function must be specified in the USB_HOST_AUDIO_V1_0_ControlRequest
function. The Audio v1.0 client driver will invoke this function with event
relevant parameters. The descriptions of the event handler function parameters
are as follows:
- audioObj - Audio v1.0 client driver object associated with this event
- requestHandle - Request handle of the control transfer request that caused
this event
- result - Completion result of the control transfer. This will be
USB_HOST_AUDIO_V1_0_RESULT_SUCCESS if the control transfer
completed successfully, USB_HOST_AUDIO_V1_0_RESULT_FAILURE
if an unknown failure occurred, or
USB_HOST_AUDIO_V1_0_RESULT_REQUEST_STALLED if the request
was stalled.
- size - Size of the data stage that was transferred
- context - Value identifying the context of the application that was
provided when the USB_HOST_AUDIO_V1_0_ControlRequest function
was called.
Remarks:
None.
*/
typedef void (* USB_HOST_AUDIO_V1_0_CONTROL_CALLBACK)
(
USB_HOST_AUDIO_V1_0_OBJ audioObj,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE requestHandle,
USB_HOST_AUDIO_V1_0_RESULT result,
size_t size,
uintptr_t context
);
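// *****************************************************************************
/* Example: Control transfer callback implementation (illustrative sketch).
   This block is not part of the original header. It shows a callback matching
   the USB_HOST_AUDIO_V1_0_CONTROL_CALLBACK signature above, as it could be
   passed to the USB_HOST_AUDIO_V1_0_ControlRequest function. The APP_DATA
   structure and its members are hypothetical.

    void APP_USBHostAudioControlCallback
    (
        USB_HOST_AUDIO_V1_0_OBJ audioObj,
        USB_HOST_AUDIO_V1_0_REQUEST_HANDLE requestHandle,
        USB_HOST_AUDIO_V1_0_RESULT result,
        size_t size,
        uintptr_t context
    )
    {
        APP_DATA * appData = (APP_DATA *)context;

        // Record how the control transfer finished; size is the number of
        // bytes moved in the data stage.
        appData->controlRequestDone = true;
        appData->controlRequestSucceeded =
            (result == USB_HOST_AUDIO_V1_0_RESULT_SUCCESS);
        appData->controlBytesTransferred = size;
    }
*/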
// ****************************************************************************
// ****************************************************************************
// Section: Client Access Functions
// ****************************************************************************
// ****************************************************************************
// ****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_AttachEventHandlerSet
(
USB_HOST_AUDIO_V1_0_ATTACH_EVENT_HANDLER eventHandler,
uintptr_t context
);
Summary:
Sets an attach/detach event handler.
Description:
This function will set an attach event handler. The attach event handler
will be called when an Audio v1.0 device has been attached or detached. The
context will be returned in the event handler. This function should be
called before the bus has been enabled.
Precondition:
None.
Parameters:
eventHandler - Pointer to the attach event handler
context - An application defined context that will be returned in the event
handler
Returns:
- USB_HOST_AUDIO_V1_0_RESULT_SUCCESS - if the attach event handler was registered
successfully
- USB_HOST_AUDIO_V1_0_RESULT_FAILURE - if the number of registered event
handlers has exceeded USB_HOST_AUDIO_V1_0_ATTACH_LISTENERS_NUMBER
Remarks:
This function should be called before the USB_HOST_BusEnable function is called.
*/
#define USB_HOST_AUDIO_V1_0_AttachEventHandlerSet USB_HOST_AUDIO_V1_AttachEventHandlerSet
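// *****************************************************************************
/* Example: Registering the attach event handler (illustrative sketch).
   This block is not part of the original header. Per the Remarks above, the
   handler is registered before the host bus is enabled. The
   APP_USBHostAudioAttachHandler function and appData variable are
   hypothetical application-side names.

    USB_HOST_AUDIO_V1_0_RESULT result;

    result = USB_HOST_AUDIO_V1_0_AttachEventHandlerSet(
        APP_USBHostAudioAttachHandler, (uintptr_t)&appData);

    if (result == USB_HOST_AUDIO_V1_0_RESULT_SUCCESS)
    {
        // Only now enable the bus (USB_HOST_BusEnable, see the Remarks above);
        // its exact arguments are not shown in this header.
    }
*/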
// ****************************************************************************
/* Function:
USB_HOST_DEVICE_OBJ_HANDLE USB_HOST_AUDIO_V1_0_DeviceObjHandleGet
(
USB_HOST_AUDIO_V1_0_OBJ audioDeviceObj
);
Summary:
Returns the device object handle for this Audio v1.0 Device.
Description:
This function returns the device object handle for this Audio v1.0 Device. This
returned device object handle can be used by the application to perform
device-level operations such as getting the string descriptors.
Precondition:
None.
Parameters:
audioDeviceObj - Audio V1.0 device object handle returned in the
USB_HOST_AUDIO_V1_0_ATTACH_EVENT_HANDLER function.
Returns:
This function will return a valid device object handle if the device is still
connected to the system. Otherwise, USB_HOST_DEVICE_OBJ_HANDLE_INVALID is
returned.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_DeviceObjHandleGet USB_HOST_AUDIO_V1_DeviceObjHandleGet
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_ControlRequest
(
USB_HOST_AUDIO_V1_0_OBJ audioObj,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle,
USB_SETUP_PACKET *setupPacket,
void * data,
USB_HOST_AUDIO_V1_0_CONTROL_CALLBACK callback,
uintptr_t context
);
Summary:
Schedules an Audio v1.0 control transfer.
Description:
This function schedules an Audio v1.0 control transfer. The audioObj parameter
is an object of the Audio v1.0 Class Driver to which the audio control transfer
is to be scheduled. The setupPacket parameter points to the SETUP command to be
sent in the setup state of the control transfer. The size and the direction of
the data stage is indicated by the SETUP packet. For control transfers where
there is no data stage, data is ignored and can be NULL. In all other instances,
data should point to the data to be transferred in the data stage of
the control transfer.
If the transfer was scheduled successfully, requestHandle will contain a
transfer handle that uniquely identifies this transfer. If the transfer
could not be scheduled successfully, requestHandle will contain
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
When the control transfer completes, the Audio v1.0 Client Driver will call
the specified callback function. The context parameter specified here will
be returned in the callback.
Precondition:
The Audio v1.0 Device should be attached.
Parameters:
audioObj - Audio v1.0 client driver object
requestHandle - Output parameter that will contain the handle to this
transfer
setupPacket - Pointer to the SETUP packet to be sent to the device in the SETUP
stage of the control transfer
data - For control transfers with a data stage, this should point to
data to be sent to the device (for a control write transfer)
or point to the buffer that will receive data from the device
(for a control read transfer). For control transfers that do not
require a data stage, this parameter is ignored and can be NULL.
callback - Pointer to the callback function that will be called when the
control transfer completes. If the callback function is NULL,
there will be no notification of when the control transfer will
complete.
context - User-defined context that is returned with the callback function
Returns:
- USB_HOST_AUDIO_V1_0_RESULT_SUCCESS - The transfer was scheduled successfully.
requestHandle will contain a valid transfer handle.
- USB_HOST_AUDIO_V1_0_RESULT_FAILURE - An unknown failure occurred. requestHandle will
contain USB_HOST_AUDIO_V1_0_REQUEST_HANDLE_INVALID.
- USB_HOST_AUDIO_V1_0_RESULT_PARAMETER_INVALID - The data pointer or requestHandle pointer
is NULL
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_ControlRequest
(
USB_HOST_AUDIO_V1_0_OBJ audioObj,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle,
USB_SETUP_PACKET *setupPacket,
void * data,
USB_HOST_AUDIO_V1_0_CONTROL_CALLBACK callback,
uintptr_t context
);
// *****************************************************************************
/* Function:
uint8_t USB_HOST_AUDIO_V1_0_NumberOfStreamGroupsGet
(
USB_HOST_AUDIO_V1_0_OBJ audioObj
);
Summary:
Gets the number of stream groups present in the attached Audio v1.0 Device.
Description:
This function will get the number of stream groups present in the attached
Audio v1.0 Device. The audio streams within an audio stream group cannot be
enabled at the same time.
Precondition:
The Audio v1.0 Device should have been attached.
Parameters:
audioObj - Audio v1.0 Client Driver object
Returns:
The number of audio stream groups present in the attached Audio v1.0 Device.
Remarks:
None.
*/
uint8_t USB_HOST_AUDIO_V1_0_NumberOfStreamGroupsGet
(
USB_HOST_AUDIO_V1_0_OBJ audioObj
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_StreamGetFirst
(
USB_HOST_AUDIO_V1_0_OBJ audioDeviceObj,
uint8_t streamGroupIndex,
USB_HOST_AUDIO_V1_0_STREAM_INFO * streamInfo
);
Summary:
Returns information about first audio stream in the specified audio stream
group.
Description:
This function returns information about the first audio stream in the specified
audio stream group. The stream group index is a parameter to this function
and can be any value from zero to the number of stream groups minus one.
The number of stream groups can be obtained by using the
USB_HOST_AUDIO_V1_0_NumberOfStreamGroupsGet function.
The streamInfo object is an out parameter to this function.
Precondition:
The Audio v1.0 Device should have been attached to the Host.
Parameters:
audioDeviceObj - Audio v1.0 Client Driver object
streamGroupIndex - Stream group index
streamInfo - Pointer to the streamInfo object
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_RESULT_OBJ_INVALID - The specified Audio v1.0 client
driver object does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_StreamGetFirst
(
USB_HOST_AUDIO_V1_0_OBJ audioDeviceObj,
uint8_t streamGroupIndex,
USB_HOST_AUDIO_V1_0_STREAM_INFO * streamInfo
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_StreamGetNext
(
USB_HOST_AUDIO_V1_0_STREAM_OBJ audioStreamObj,
USB_HOST_AUDIO_V1_0_STREAM_INFO * streamInfo
);
Summary:
Returns information about the next audio stream in the specified audio stream
group.
Description:
This function returns information about the next audio stream in the specified
Audio stream group. The USB_HOST_AUDIO_V1_0_StreamGetFirst function should
have been called at least once on the same audio stream group before calling
this function. Then, calling this function repeatedly on the stream group
will return information about the next audio stream in the stream group.
When there are no more audio streams to report, the function returns
USB_HOST_AUDIO_V1_0_RESULT_END_OF_STREAM_LIST.
Calling the USB_HOST_AUDIO_V1_0_StreamGetFirst function on the stream group
index after the USB_HOST_AUDIO_V1_0_StreamGetNext function has been called
will cause the Audio v1.0 Client Driver to reset the audio stream group to point
to the first stream in the stream group.
Precondition:
The USB_HOST_AUDIO_V1_0_StreamGetFirst function must have been called
before calling this function.
Parameters:
audioStreamObj - Present audio stream object
streamInfo - Pointer to the streamInfo object
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_RESULT_OBJ_INVALID - The specified Audio v1.0 client
driver object does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
- USB_HOST_AUDIO_V1_0_RESULT_END_OF_STREAM_LIST - There are no more audio
streams in the stream group
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_RESULT USB_HOST_AUDIO_V1_0_StreamGetNext
(
USB_HOST_AUDIO_V1_0_STREAM_OBJ audioStreamObj,
USB_HOST_AUDIO_V1_0_STREAM_INFO * streamInfo
);
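// *****************************************************************************
/* Example: Enumerating the streams of stream group 0 (illustrative sketch).
   This block is not part of the original header. It combines
   USB_HOST_AUDIO_V1_0_NumberOfStreamGroupsGet, USB_HOST_AUDIO_V1_0_StreamGetFirst,
   and USB_HOST_AUDIO_V1_0_StreamGetNext to locate an OUT stream. The appData
   variable and its members are hypothetical; the result constants follow the
   Returns sections above.

    USB_HOST_AUDIO_V1_0_STREAM_INFO streamInfo;
    USB_HOST_AUDIO_V1_0_RESULT result;

    if (USB_HOST_AUDIO_V1_0_NumberOfStreamGroupsGet(appData.audioObj) > 0)
    {
        result = USB_HOST_AUDIO_V1_0_StreamGetFirst(
            appData.audioObj, 0, &streamInfo);

        while (result == USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
        {
            if (streamInfo.streamDirection == USB_HOST_AUDIO_V1_0_DIRECTION_OUT)
            {
                // Remember the stream object of the first OUT (playback) stream.
                appData.outStreamObj = streamInfo.streamObj;
                break;
            }
            result = USB_HOST_AUDIO_V1_0_StreamGetNext(
                streamInfo.streamObj, &streamInfo);
        }
    }
*/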
// ****************************************************************************
// ****************************************************************************
// Section: Audio Stream Access Functions
// ****************************************************************************
// ****************************************************************************
// ****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_HANDLE USB_HOST_AUDIO_V1_0_StreamOpen
(
USB_HOST_AUDIO_V1_0_STREAM_OBJ audioStreamObj
);
Summary:
Opens the specified audio stream.
Description:
This function will open the specified audio stream. Once opened, the audio
stream can be accessed via the handle which this function returns. The
audioStreamObj parameter is the value returned in the
USB_HOST_AUDIO_V1_0_StreamGetFirst or USB_HOST_AUDIO_V1_0_StreamGetNext
functions.
Precondition:
The audio stream object should be valid.
Parameters:
audioStreamObj - Audio stream object
Returns:
This function will return a valid handle if the audio stream could be opened
successfully; otherwise, it will return USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID.
Remarks:
None.
*/
#define USB_HOST_AUDIO_V1_0_StreamOpen USB_HOST_AUDIO_V1_StreamOpen
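// *****************************************************************************
/* Example: Opening a stream (illustrative sketch).
   This block is not part of the original header. The stream object is assumed
   to have been obtained from USB_HOST_AUDIO_V1_0_StreamGetFirst or
   USB_HOST_AUDIO_V1_0_StreamGetNext; the appData variable and its members are
   hypothetical, and the invalid-handle value is named as in the description
   above.

    appData.streamHandle = USB_HOST_AUDIO_V1_0_StreamOpen(appData.outStreamObj);

    if (appData.streamHandle != USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID)
    {
        // The stream can now be configured and enabled.
    }
*/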
// ****************************************************************************
/* Function:
void USB_HOST_AUDIO_V1_0_StreamClose
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE audioStreamHandle
);
Summary:
Closes the audio stream.
Description:
This function will close the open audio stream. This closes the association
between the application entity that opened the audio stream and the audio
stream. The audio stream handle becomes invalid.
Precondition:
None.
Parameters:
audioStreamHandle - Handle to the audio stream obtained from the
USB_HOST_AUDIO_V1_0_StreamOpen function.
Returns:
None.
Remarks:
The audio stream handle becomes invalid after calling this function.
*/
#define USB_HOST_AUDIO_V1_0_StreamClose USB_HOST_AUDIO_V1_StreamClose
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamEventHandlerSet
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_0_STREAM_EVENT_HANDLER appAudioHandler,
uintptr_t context
);
Summary:
Registers an event handler with the Audio v1.0 Client Driver stream.
Description:
This function registers a client specific Audio v1.0 stream event handler.
The Audio v1.0 Host Client Driver will call the appAudioHandler function,
specified as the second argument, with the relevant event and associated
event data in response to audio stream data transfers that have been
scheduled by the client.
Precondition:
None.
Parameters:
handle - A handle to the Audio v1.0 stream
appAudioHandler - A pointer to the event handler function. If NULL, events
will not be generated.
context - The application specific context that is returned in the event handler
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamEventHandlerSet
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE handle,
USB_HOST_AUDIO_V1_0_STREAM_EVENT_HANDLER appAudioHandler,
uintptr_t context
);
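// *****************************************************************************
/* Example: Registering a stream event handler (illustrative sketch).
   This block is not part of the original header. It registers the
   hypothetical APP_USBHostAudioStreamEventHandler function (sketched earlier
   in this file) on an opened stream; appData is a hypothetical application
   variable.

    USB_HOST_AUDIO_V1_0_STREAM_RESULT streamResult;

    streamResult = USB_HOST_AUDIO_V1_0_StreamEventHandlerSet(
        appData.streamHandle,
        APP_USBHostAudioStreamEventHandler,
        (uintptr_t)&appData);

    if (streamResult != USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
    {
        // Handle the error (invalid stream handle or unknown failure).
    }
*/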
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamEnable
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle
);
Summary:
Schedules an audio stream enable request for the specified audio stream.
Description:
This function schedules an audio stream enable request for the specified
audio stream. An audio stream must be enabled before scheduling any data
transfer with the stream. A USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE
event is generated when this request is completed.
USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE_DATA returns the status and
request handle of the request.
Precondition:
The audio stream should have been opened. Only one audio stream from an audio
stream group can be enabled at a time.
Parameters:
streamHandle - Handle to the audio v1.0 stream
requestHandle - Handle to the stream enable request
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamEnable
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle
);
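// *****************************************************************************
/* Example: Enabling a stream (illustrative sketch).
   This block is not part of the original header. The request completes
   asynchronously; the USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE event
   signals completion. The appData variable and its members are hypothetical.

    USB_HOST_AUDIO_V1_0_REQUEST_HANDLE requestHandle;
    USB_HOST_AUDIO_V1_0_STREAM_RESULT streamResult;

    streamResult = USB_HOST_AUDIO_V1_0_StreamEnable(
        appData.streamHandle, &requestHandle);

    if (streamResult == USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
    {
        // Wait for USB_HOST_AUDIO_V1_0_STREAM_EVENT_ENABLE_COMPLETE in the
        // stream event handler before scheduling reads or writes.
    }
*/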
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamDisable
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle
);
Summary:
Schedules an audio stream disable request for the specified audio stream.
Description:
This function schedules an audio stream disable request for the specified
audio stream. A USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE event is
generated when this request is completed.
USB_HOST_AUDIO_V1_0_STREAM_EVENT_DISABLE_COMPLETE_DATA
returns the status and request handle of the request.
Precondition:
The audio stream should have been opened.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
requestHandle - Handle to the stream disable request
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamDisable
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE *requestHandle
);
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamSamplingRateSet
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE * requestHandle,
uint32_t* samplingRate
);
Summary:
Schedules an audio stream set sampling rate request for the specified
audio stream.
Description:
This function schedules an audio stream set sampling rate request for the
specified audio stream. A USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE
event is generated when this request is completed.
USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
requestHandle - Handle to the stream set sampling rate request
samplingRate - Pointer to the sampling rate
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamSamplingRateSet
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_REQUEST_HANDLE *requestHandle,
uint32_t *samplingRate
);
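// *****************************************************************************
/* Example: Setting the sampling rate (illustrative sketch).
   This block is not part of the original header. The chosen rate should be
   one of the rates advertised in the tSamFreq array of the stream's
   USB_HOST_AUDIO_V1_0_STREAM_INFO; 48000 Hz is only an example. The sampling
   rate variable is kept static here on the assumption that the request
   completes asynchronously; appData is a hypothetical application variable.

    static uint32_t samplingRate = 48000;
    USB_HOST_AUDIO_V1_0_REQUEST_HANDLE requestHandle;
    USB_HOST_AUDIO_V1_0_STREAM_RESULT streamResult;

    streamResult = USB_HOST_AUDIO_V1_0_StreamSamplingRateSet(
        appData.streamHandle, &requestHandle, &samplingRate);

    if (streamResult == USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
    {
        // Wait for USB_HOST_AUDIO_V1_0_STREAM_EVENT_SAMPLING_RATE_SET_COMPLETE.
    }
*/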
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamWrite
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
Summary:
Schedules an audio stream write request for the specified audio stream.
Description:
This function schedules an audio stream write request for the specified
audio stream. A USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE event is
generated when this request is completed.
USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened and enabled. The direction of the
audio stream should be USB_HOST_AUDIO_V1_0_DIRECTION_OUT.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
transferHandle - Handle to the stream write transfer request
source - Pointer to the buffer containing data to be written to the
device
length - Amount of data to write (in bytes)
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_0_StreamWrite
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
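// *****************************************************************************
/* Example: Writing audio data to an OUT stream (illustrative sketch).
   This block is not part of the original header. The stream is assumed to be
   open, enabled, and of direction USB_HOST_AUDIO_V1_0_DIRECTION_OUT, as the
   Precondition above requires. The appData variable and its txBuffer member
   are hypothetical.

    USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE transferHandle;
    USB_HOST_AUDIO_V1_0_STREAM_RESULT streamResult;

    streamResult = USB_HOST_AUDIO_V1_0_StreamWrite(
        appData.streamHandle,
        &transferHandle,
        appData.txBuffer,
        sizeof(appData.txBuffer));

    if (streamResult == USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
    {
        // USB_HOST_AUDIO_V1_0_STREAM_EVENT_WRITE_COMPLETE reports completion;
        // the buffer would typically need to stay valid until then.
    }
*/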
// *****************************************************************************
/* Function:
USB_HOST_AUDIO_V1_0_STREAM_RESULT USB_HOST_AUDIO_V1_0_StreamRead
(
USB_HOST_AUDIO_V1_0_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
Summary:
Schedules an audio stream read request for the specified audio stream.
Description:
This function schedules an audio stream read request for the specified
audio stream. A USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE event is
generated when this request is completed.
USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE_DATA returns
the status and request handle of the request.
Precondition:
The audio stream should have been opened and enabled. The direction of the
audio stream should be USB_HOST_AUDIO_V1_0_DIRECTION_IN.
Parameters:
streamHandle - Handle to the Audio v1.0 stream
transferHandle - Handle to the stream read transfer request
source - Pointer to the buffer to which the data read from the
device will be written
length - Amount of data to read (in bytes)
Returns:
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS - The operation was successful
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_HANDLE_INVALID - The specified audio
stream does not exist
- USB_HOST_AUDIO_V1_0_STREAM_RESULT_FAILURE - An unknown failure occurred
Remarks:
None.
*/
USB_HOST_AUDIO_V1_RESULT USB_HOST_AUDIO_V1_0_StreamRead
(
USB_HOST_AUDIO_V1_STREAM_HANDLE streamHandle,
USB_HOST_AUDIO_V1_STREAM_TRANSFER_HANDLE * transferHandle,
void * source,
size_t length
);
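// *****************************************************************************
/* Example: Reading audio data from an IN stream (illustrative sketch).
   This block is not part of the original header. The stream is assumed to be
   open, enabled, and of direction USB_HOST_AUDIO_V1_0_DIRECTION_IN, as the
   Precondition above requires. The appData variable and its rxBuffer member
   are hypothetical.

    USB_HOST_AUDIO_V1_0_STREAM_TRANSFER_HANDLE transferHandle;
    USB_HOST_AUDIO_V1_0_STREAM_RESULT streamResult;

    streamResult = USB_HOST_AUDIO_V1_0_StreamRead(
        appData.streamHandle,
        &transferHandle,
        appData.rxBuffer,
        sizeof(appData.rxBuffer));

    if (streamResult == USB_HOST_AUDIO_V1_0_STREAM_RESULT_SUCCESS)
    {
        // USB_HOST_AUDIO_V1_0_STREAM_EVENT_READ_COMPLETE reports completion
        // and the amount of data received.
    }
*/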
// *****************************************************************************
// *****************************************************************************
// Section: Included Files (continued)
// *****************************************************************************
// *****************************************************************************
/* The following included file maps the interface definitions above to appropriate
static implementations depending on the build mode.
*/
#include "usb/src/usb_host_audio_v1_mapping.h"
#endif |
# -*- coding: utf-8 -*-
from notifier import Notifier
from musicmode import *
from brain import Brain
from mpd import MPDClient
import str_formater
from modules.app_utils import *
class Conversation(object):
def __init__(self, mic, profile, logger):
self.persona = profile['persona']
self.mic = mic
self.profile = profile
self.brain = Brain(mic, profile, logger)
self.notifier = Notifier(profile, logger)
self.logger = logger
def delegateInput(self, text):
"""A wrapper for querying brain."""
got_hit = False
# check if input is meant to start the music module
if any(x in text.upper() for x in ["SPOTIFY", "MUSIC"]):
# check if mpd client is running
try:
client = MPDClient()
client.timeout = None
client.idletimeout = None
client.connect("localhost", 6600)
except:
self.logger.warning("Failed to init MPDClient")
self.mic.say("Wybacz, ale najwyraźniej usługa Spotify nie działa")
return
self.logger.info("waiting for Spotify playlist")
self.mic.say("Poczekaj chwilę, wczytuję listę utworów Spotify")
music_mode = MusicMode(self.persona, self.mic, self.logger)
music_mode.handleForever()
return
else:
if " następnie " in lowerUTF8(text):
l_text = text.split(" następnie ")
for text in l_text:
new_got_hit = self.brain.query(text)
got_hit = got_hit or new_got_hit
else:
got_hit = self.brain.query(text)
return got_hit
def handleForever(self):
"""Delegates user input to the handling function when activated."""
initial_threshold = None #self.mic.fetchThreshold(RATE=48000, CHUNK=8192, THRESHOLD_TIME=4, AVERAGE_TIME=4)
repeat = True
while repeat:
# Print notifications until empty
notifications = self.notifier.getAllNotifications()
for notif in notifications:
notif = str_formater.unicodeToUTF8(notif, self.logger)
self.logger.info("Got new notification: %s" % notif )
#self.mic.say(notif)
try:
threshold, transcribed = self.mic.passiveListen()
except KeyboardInterrupt:
threshold = None
repeat = False
except:
self.logger.critical("fatal error processing passive listen", exc_info=True)
continue
if threshold:
if transcribed:
input = transcribed
else:
input = self.mic.activeListen(initial_threshold, RATE=44100, CHUNK=8196, LISTEN_TIME=6, AVERAGE_TIME=5)
input = str_formater.unicodeToUTF8(input, self.logger)
self.logger.debug("got input %s" % (input))
if input:
if any(x in input.upper() for x in ["KONIEC"]):
repeat = False
self.logger.info("Quiting after voice request")
self.mic.say("Kończę pracę. Do usłyszenia.")
#elif any(x in input.upper().replace('ł','Ł') for x in ["PRZEŁADUJ"]):
elif any(x in upperUTF8(input) for x in ["PRZEŁADUJ"]):
self.brain.reload_modules()
elif any(x in upperUTF8(input) for x in ["ECHO"]):
self.mic.say(input)
#self.mic.play(input)
else:
self.delegateInput(input)
else:
self.mic.say("Powtórz proszę.")
# else:
# if any(x in transcribed.upper() for x in ["KONIEC"]):
# repeat = False
# self.logger.info("Quiting after voice request")
# self.mic.say("Kończę pracę. Do usłyszenia.")
# elif any(x in upperUTF8(transcribed) for x in ["PRZEŁADUJ"]):
# self.brain.reload_modules()
|
/* APPLE LOCAL file v7 merge */
/* Test the `veoru8' ARM Neon intrinsic. */
/* This file was autogenerated by neon-testgen. */
/* { dg-do assemble } */
/* { dg-require-effective-target arm_neon_ok } */
/* { dg-options "-save-temps -O0 -mfpu=neon -mfloat-abi=softfp" } */
#include "arm_neon.h"
void test_veoru8 (void)
{
uint8x8_t out_uint8x8_t;
uint8x8_t arg0_uint8x8_t;
uint8x8_t arg1_uint8x8_t;
out_uint8x8_t = veor_u8 (arg0_uint8x8_t, arg1_uint8x8_t);
}
/* { dg-final { scan-assembler "veor\[ \]+\[dD\]\[0-9\]+, \[dD\]\[0-9\]+, \[dD\]\[0-9\]+!?\(\[ \]+@\[a-zA-Z0-9 \]+\)?\n" } } */
/* { dg-final { cleanup-saved-temps } } */
|
import requests
import os
import tkinter.messagebox as tkmsg
import os.path
import sys
import tkinter as tk
from flask_sockets import Sockets
from time import sleep
from flask import Flask, session, redirect, render_template, request, make_response, jsonify
from bs4 import BeautifulSoup
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
# output file name
filename = "result"
# initialize the result file
ddf = open('result.txt','w')
ddf.write('')
ddf.close()
# end of file initialization
# handler that runs when the button is clicked
def ma():
# get the URL from the input form
UR = txt.get()
print(UR)
# end of URL retrieval
# build the URL of the first page
furl = UR + "/1/"
# end of URL construction
# fetch the contents of the first page
try:
rf = requests.get(furl)
except:
tkmsg.showinfo('info',"何らかのエラーが発生しました。正しいURLを入力しているかご確認ください。")
soup = BeautifulSoup(rf.content, "html.parser")
resF = soup.find_all("p")
# end of first page fetch
# show an info dialog
tkmsg.showinfo('info',"OKを押すと処理を開始します。")
# end of info dialog
print(resF)
run(UR,resF)
def run(UR,resF):
i = 0
sd = ch(i,"",resF)
tkmsg.showinfo('info',"処理を開始しました。このポップアップは処理終了後自動的に消滅します。")
while not sd == "y":
i += 1
print(i)
url = UR + "/" + str(i) + "/"
print(url)
r = requests.get(url)
soup = BeautifulSoup(r.content, "html.parser")
res = soup.find_all("p")
sd = ch(i,res,resF)
f = open('result.txt','a')
f.write(str(res) + '\n')
f.close()
print(res)
sleep(5)
tkmsg.showinfo('info',"処理を終了しました。結果はresult.txtに出力されています。ウィンドウを閉じることができます。")
def ch(i,res,resF):
if not i == 1:
if res == resF or res == "<p>The article you were looking for was not found, but maybe try looking again!</p>" :
sd = "y"
else:
sd = "n"
else:
# print("I=else")
sd = "n"
return sd
#tkinter setup
ro = tk.Tk()
ro.title("記事文字起こし")
ro.geometry("640x480")
lbl = tk.Label(text='urlを入力してください。')
lbl.place(x=100, y=70)
txt = tk.Entry(width=20)
txt.place(x=100, y=100)
btn = tk.Button(ro, text='送信', command=ma,bg='#4076e3')
btn.place(x=100, y=200)
ro.mainloop()
# end of tkinter setup
|
# Generated by Django 3.0.7 on 2020-08-23 19:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('explore', '0023_searchresult_order'),
]
operations = [
migrations.CreateModel(
name='TableResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(max_length=255)),
('idx', models.IntegerField()),
('show', models.CharField(max_length=300)),
('subcorpora', models.CharField(max_length=300)),
('relative', models.CharField(max_length=20, null=True)),
('keyness', models.CharField(max_length=2, null=True)),
('sort', models.CharField(max_length=10, null=True)),
('produced_from', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='explore.SearchResult')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
options={
'unique_together': {('slug', 'user', 'idx')},
},
),
]
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[15],{710:function(t,e,n){"use strict";n.r(e);var o=n(623),c=n(0),r=n(114),l=c.a.extend({data:()=>({contact:r.contact}),methods:{parsePhoneNumberFromString:o.a,openCookiePrefs(){this.$store.commit("cookies/openMessage")}},head:()=>({title:"privacy"})}),h=n(4),component=Object(h.a)(l,(function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("main",[t._m(0),t._v(" "),n("div",{staticClass:"container"},[n("section",{staticClass:"card row",attrs:{id:"content"}},[t._m(1),t._v(" "),t._m(2),t._v(" "),n("div",{staticClass:"col-12 col-l-6"},[n("h4",[t._v("Responsible for this website")]),t._v(" "),n("p",[t._v("\n The persons responsible for data processing on this website are:\n ")]),t._v(" "),n("p",[t._v("\n "+t._s(t.contact.company)),n("br"),t._v("\n "+t._s(t.contact.street)),n("br"),t._v("\n "+t._s(t.contact.postcode)+" "+t._s(t.contact.city)+"\n ")]),t._v(" "),n("p",[n("a",{attrs:{href:t.parsePhoneNumberFromString(t.contact.phone,"DE").getURI(),title:t.contact.company+" anrufen","data-icon-left":"k"}},[t._v("\n "+t._s(t.contact.phone)+" ")]),n("br"),t._v(" "),n("Mailto",{attrs:{mail:t.contact.mail,"data-icon-left":"h"}})],1)]),t._v(" "),n("div",{staticClass:"col-12"},[n("h3",[t._v("Plugins and Tools")]),t._v(" "),n("p",[t._v("\n The first time you open this web page, a window for Configuration of\n the local website settings is displayed. You have the option of\n using cookies and allow or prohibit third party services.\n ")]),t._v(" "),n("button",{staticClass:"button secondary",attrs:{"data-icon-right":"n"},on:{click:t.openCookiePrefs}},[t._v("\n Change Configuration\n ")])])])])])}),[function(){var t=this.$createElement,e=this._self._c||t;return e("h1",{staticClass:"container title"},[e("span",{staticClass:"bg-primary"},[this._v("privacy")]),e("br"),this._v(" "),e("span",{staticClass:"bg-white"},[this._v("Company")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("header",{staticClass:"col-12"},[e("h2",[this._v("privacy")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"col-12 col-l-6"},[e("h3",[this._v("Put all your privacy-stuff here")]),this._v(" "),e("p",[this._v("\n Lorem ipsum dolor sit amet consectetur adipisicing elit. Voluptas\n amet ducimus alias a. Rerum amet totam eos magnam omnis ea inventore\n sint, necessitatibus molestiae et explicabo quos ex dicta saepe.\n ")])])}],!1,null,null,null);e.default=component.exports}}]); |
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the behavior of RPC importprivkey on set and unset labels of
addresses.
It tests different cases in which an address is imported with importaddress
with or without a label and then its private key is imported with importprivkey
with and without a label.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet_util import test_address
class ImportWithLabel(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-addresstype=p2sh-segwit"]] * 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
"""Main test logic"""
self.log.info(
"Test importaddress with label and importprivkey without label."
)
self.log.info("Import a watch-only address with a label.")
address = self.nodes[0].getnewaddress()
label = "Test Label"
self.nodes[1].importaddress(address, label)
test_address(self.nodes[1],
address,
iswatchonly=True,
ismine=False,
label=label)
self.log.info(
"Import the watch-only address's private key without a "
"label and the address should keep its label."
)
priv_key = self.nodes[0].dumpprivkey(address)
self.nodes[1].importprivkey(priv_key)
test_address(self.nodes[1],
address,
label=label)
self.log.info(
"Test importaddress without label and importprivkey with label."
)
self.log.info("Import a watch-only address without a label.")
address2 = self.nodes[0].getnewaddress()
self.nodes[1].importaddress(address2)
test_address(self.nodes[1],
address2,
iswatchonly=True,
ismine=False,
label="")
self.log.info(
"Import the watch-only address's private key with a "
"label and the address should have its label updated."
)
priv_key2 = self.nodes[0].dumpprivkey(address2)
label2 = "Test Label 2"
self.nodes[1].importprivkey(priv_key2, label2)
test_address(self.nodes[1],
address2,
label=label2)
self.log.info("Test importaddress with label and importprivkey with label.")
self.log.info("Import a watch-only address with a label.")
address3 = self.nodes[0].getnewaddress()
label3_addr = "Test Label 3 for importaddress"
self.nodes[1].importaddress(address3, label3_addr)
test_address(self.nodes[1],
address3,
iswatchonly=True,
ismine=False,
label=label3_addr)
self.log.info(
"Import the watch-only address's private key with a "
"label and the address should have its label updated."
)
priv_key3 = self.nodes[0].dumpprivkey(address3)
label3_priv = "Test Label 3 for importprivkey"
self.nodes[1].importprivkey(priv_key3, label3_priv)
test_address(self.nodes[1],
address3,
label=label3_priv)
self.log.info(
"Test importprivkey won't label new dests with the same "
"label as others labeled dests for the same key."
)
self.log.info("Import a watch-only legacy address with a label.")
address4 = self.nodes[0].getnewaddress()
label4_addr = "Test Label 4 for importaddress"
self.nodes[1].importaddress(address4, label4_addr)
test_address(self.nodes[1],
address4,
iswatchonly=True,
ismine=False,
label=label4_addr,
embedded=None)
self.log.info(
"Import the watch-only address's private key without a "
"label and new destinations for the key should have an "
"empty label while the 'old' destination should keep "
"its label."
)
priv_key4 = self.nodes[0].dumpprivkey(address4)
self.nodes[1].importprivkey(priv_key4)
embedded_addr = self.nodes[1].getaddressinfo(address4)['embedded']['address']
test_address(self.nodes[1],
embedded_addr,
label="")
test_address(self.nodes[1],
address4,
label=label4_addr)
self.stop_nodes()
if __name__ == "__main__":
ImportWithLabel().main()
|
import React from 'react'
import '../App.css'
export const Footer = () => {
return (
<div>
<section id="lab_social_icon_footer">
{/* <!-- Include Font Awesome Stylesheet in Header --> */}
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet"/>
<div className="container footerContainer">
<div className="text-center center-block">
<a href="https://www.facebook.com/bootsnipp"><i id="social-gh" class="fa fa-github-square fa-3x social"></i></a>
<a href="https://www.youtube.com/channel/UCjVDEpRu1mFO2MPWFluwUhA?view_as=subscriber">
<i id="social-yt" class="fa fa-youtube-square fa-3x social">
</i>
</a>
</div>
<div className="text-center center-block transparentText fontWeight">
Made with 💖 by <span>BINH NGUYEN</span> aka <strong>SHIRO</strong>
</div>
</div>
</section>
</div>
)
}
export default Footer |
from __future__ import absolute_import, unicode_literals
import datetime
import logging
from django.utils.translation import ugettext_lazy as _, ungettext
from debug_toolbar.panels import Panel
from debug_toolbar.utils import ThreadCollector
try:
import threading
except ImportError:
threading = None
MESSAGE_IF_STRING_REPRESENTATION_INVALID = "[Could not get log message]"
class LogCollector(ThreadCollector):
def collect(self, item, thread=None):
# Avoid logging SQL queries since they are already in the SQL panel
# TODO: Make this check whether SQL panel is enabled
if item.get("channel", "") == "django.db.backends":
return
super(LogCollector, self).collect(item, thread)
class ThreadTrackingHandler(logging.Handler):
def __init__(self, collector):
logging.Handler.__init__(self)
self.collector = collector
def emit(self, record):
try:
message = record.getMessage()
except Exception:
message = MESSAGE_IF_STRING_REPRESENTATION_INVALID
record = {
"message": message,
"time": datetime.datetime.fromtimestamp(record.created),
"level": record.levelname,
"file": record.pathname,
"line": record.lineno,
"channel": record.name,
}
self.collector.collect(record)
# We don't use enable/disable_instrumentation because logging is global.
# We can't add thread-local logging handlers. Hopefully logging is cheap.
collector = LogCollector()
logging_handler = ThreadTrackingHandler(collector)
logging.root.addHandler(logging_handler)
class LoggingPanel(Panel):
template = "debug_toolbar/panels/logging.html"
def __init__(self, *args, **kwargs):
super(LoggingPanel, self).__init__(*args, **kwargs)
self._records = {}
nav_title = _("Logging")
@property
def nav_subtitle(self):
records = self._records[threading.currentThread()]
record_count = len(records)
return ungettext("%(count)s message", "%(count)s messages", record_count) % {
"count": record_count
}
title = _("Log messages")
def process_request(self, request):
collector.clear_collection()
def generate_stats(self, request, response):
records = collector.get_collection()
self._records[threading.currentThread()] = records
collector.clear_collection()
self.record_stats({"records": records})
|
def f(x):
if x:
return
if x:
return
elif y:
return
if x:
return
else:
return
if x:
return
elif y:
return
else:
return
if x:
return
elif y:
return
elif z:
return
else:
return
return None
|
import random
from collections import OrderedDict
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.template import loader
from django.utils import translation
from django.utils.translation import ugettext, ugettext_lazy as _, ungettext
import django_tables2 as tables
import jinja2
import olympia.core.logger
from olympia import amo
from olympia.access import acl
from olympia.activity.models import ActivityLog
from olympia.activity.utils import log_and_notify, send_activity_mail
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonReviewerFlags)
from olympia.amo.templatetags.jinja_helpers import absolutify
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import to_language
from olympia.discovery.models import DiscoveryItem
from olympia.lib.crypto.signing import sign_file
from olympia.reviewers.models import (
ReviewerScore, ViewUnlistedAllList, get_flags, get_flags_for_row)
from olympia.users.models import UserProfile
log = olympia.core.logger.getLogger('z.mailer')
PENDING_STATUSES = (amo.STATUS_DISABLED, amo.STATUS_NULL,
amo.STATUS_PENDING, amo.STATUS_APPROVED)
class ItemStateTable(object):
def increment_item(self):
self.item_number += 1
def set_page(self, page):
self.item_number = page.start_index()
def safe_substitute(string, *args):
return string % tuple(jinja2.escape(arg) for arg in args)
class ReviewerQueueTable(tables.Table, ItemStateTable):
addon_name = tables.Column(verbose_name=_(u'Add-on'))
addon_type_id = tables.Column(verbose_name=_(u'Type'))
waiting_time_min = tables.Column(verbose_name=_(u'Waiting Time'))
flags = tables.Column(verbose_name=_(u'Flags'), orderable=False)
class Meta:
orderable = True
def render_addon_name(self, record):
url = reverse('reviewers.review', args=[record.addon_slug])
self.increment_item()
return u'<a href="%s">%s <em>%s</em></a>' % (
url, jinja2.escape(record.addon_name),
jinja2.escape(record.latest_version))
def render_addon_type_id(self, record):
return amo.ADDON_TYPE[record.addon_type_id]
def render_flags(self, record):
if not hasattr(record, 'flags'):
record.flags = get_flags_for_row(record)
return ''.join(u'<div class="app-icon ed-sprite-%s" '
u'title="%s"></div>' % flag
for flag in record.flags)
@classmethod
def translate_sort_cols(cls, colname):
legacy_sorts = {
'name': 'addon_name',
'age': 'waiting_time_min',
'type': 'addon_type_id',
}
return legacy_sorts.get(colname, colname)
def render_waiting_time_min(self, record):
if record.waiting_time_min == 0:
r = _('moments ago')
elif record.waiting_time_hours == 0:
# L10n: first argument is number of minutes
r = ungettext(
u'{0} minute', u'{0} minutes',
record.waiting_time_min).format(record.waiting_time_min)
elif record.waiting_time_days == 0:
# L10n: first argument is number of hours
r = ungettext(
u'{0} hour', u'{0} hours',
record.waiting_time_hours).format(record.waiting_time_hours)
else:
# L10n: first argument is number of days
r = ungettext(
u'{0} day', u'{0} days',
record.waiting_time_days).format(record.waiting_time_days)
return jinja2.escape(r)
@classmethod
def default_order_by(cls):
return '-waiting_time_min'
class ViewUnlistedAllListTable(tables.Table, ItemStateTable):
addon_name = tables.Column(verbose_name=_(u'Add-on'))
guid = tables.Column(verbose_name=_(u'GUID'))
authors = tables.Column(verbose_name=_(u'Authors'),
orderable=False)
review_date = tables.Column(verbose_name=_(u'Last Review'))
version_date = tables.Column(verbose_name=_(u'Last Update'))
class Meta(ReviewerQueueTable.Meta):
model = ViewUnlistedAllList
def render_addon_name(self, record):
url = reverse('reviewers.review', args=[
'unlisted',
record.addon_slug if record.addon_slug is not None else record.id,
])
self.increment_item()
return safe_substitute(u'<a href="%s">%s <em>%s</em></a>',
url, record.addon_name, record.latest_version)
def render_guid(self, record):
return safe_substitute(u'%s', record.guid)
def render_version_date(self, record):
return safe_substitute(u'<span>%s</span>', record.version_date)
def render_review_date(self, record):
if record.review_version_num is None:
return ugettext('No Reviews')
return safe_substitute(
u'<span class="addon-review-text">'
u'<a href="#"><em>%s</em> on %s</a></span>',
record.review_version_num, record.review_date)
def render_authors(self, record):
authors = record.authors
if not len(authors):
return ''
more = ' '.join(
safe_substitute(u'%s', uname) for (_, uname) in authors)
author_links = ' '.join(
safe_substitute(u'<a href="%s">%s</a>',
UserProfile.create_user_url(id_), uname)
for (id_, uname) in authors[0:3])
return u'<span title="%s">%s%s</span>' % (
more, author_links, ' ...' if len(authors) > 3 else '')
@classmethod
def default_order_by(cls):
return '-version_date'
def view_table_factory(viewqueue):
class ViewQueueTable(ReviewerQueueTable):
class Meta(ReviewerQueueTable.Meta):
model = viewqueue
return ViewQueueTable
class ModernAddonQueueTable(ReviewerQueueTable):
addon_name = tables.Column(verbose_name=_(u'Add-on'), accessor='name')
# Override empty_values for flags so that they can be displayed even if the
# model does not have a flags attribute.
flags = tables.Column(
verbose_name=_(u'Flags'), empty_values=(), orderable=False)
last_human_review = tables.DateTimeColumn(
verbose_name=_(u'Last Review'),
accessor='addonapprovalscounter.last_human_review')
weight = tables.Column(
verbose_name=_(u'Weight'),
accessor='_current_version.autoapprovalsummary.weight')
class Meta(ReviewerQueueTable.Meta):
fields = ('addon_name', 'flags', 'last_human_review', 'weight')
# Exclude base fields ReviewerQueueTable has that we don't want.
exclude = ('addon_type_id', 'waiting_time_min',)
orderable = False
def render_flags(self, record):
if not hasattr(record, 'flags'):
record.flags = get_flags(record, record.current_version)
return super(ModernAddonQueueTable, self).render_flags(record)
def _get_addon_name_url(self, record):
return reverse('reviewers.review', args=[record.slug])
def render_addon_name(self, record):
url = self._get_addon_name_url(record)
return u'<a href="%s">%s <em>%s</em></a>' % (
url, jinja2.escape(record.name),
jinja2.escape(record.current_version))
def render_last_human_review(self, value):
return naturaltime(value) if value else ''
def render_weight(self, value):
if value > amo.POST_REVIEW_WEIGHT_HIGHEST_RISK:
classname = 'highest'
elif value > amo.POST_REVIEW_WEIGHT_HIGH_RISK:
classname = 'high'
elif value > amo.POST_REVIEW_WEIGHT_MEDIUM_RISK:
classname = 'medium'
else:
classname = 'low'
return '<span class="risk-%s">%d</span>' % (classname, value)
render_last_content_review = render_last_human_review
class ExpiredInfoRequestsTable(ModernAddonQueueTable):
deadline = tables.Column(
verbose_name=_(u'Information Request Deadline'),
accessor='addonreviewerflags.pending_info_request')
class Meta(ModernAddonQueueTable.Meta):
fields = ('addon_name', 'flags', 'last_human_review', 'weight',
'deadline')
def render_deadline(self, value):
return naturaltime(value) if value else ''
class AutoApprovedTable(ModernAddonQueueTable):
pass
class ContentReviewTable(AutoApprovedTable):
last_updated = tables.DateTimeColumn(verbose_name=_(u'Last Updated'))
class Meta(ReviewerQueueTable.Meta):
fields = ('addon_name', 'flags', 'last_updated')
# Exclude base fields ReviewerQueueTable has that we don't want.
exclude = ('addon_type_id', 'last_human_review', 'waiting_time_min',
'weight')
orderable = False
def render_last_updated(self, value):
return naturaltime(value) if value else ''
def _get_addon_name_url(self, record):
return reverse('reviewers.review', args=['content', record.slug])
class ReviewHelper(object):
"""
A class that builds enough to render the form back to the user and
process off to the correct handler.
"""
def __init__(self, request=None, addon=None, version=None,
content_review_only=False):
self.handler = None
self.required = {}
self.addon = addon
self.version = version
self.content_review_only = content_review_only
self.set_review_handler(request)
self.actions = self.get_actions(request)
def set_data(self, data):
self.handler.set_data(data)
def set_review_handler(self, request):
if (self.version and
self.version.channel == amo.RELEASE_CHANNEL_UNLISTED):
self.handler = ReviewUnlisted(
request, self.addon, self.version, 'unlisted',
content_review_only=self.content_review_only)
elif self.addon.status == amo.STATUS_NOMINATED:
self.handler = ReviewAddon(
request, self.addon, self.version, 'nominated',
content_review_only=self.content_review_only)
else:
self.handler = ReviewFiles(
request, self.addon, self.version, 'pending',
content_review_only=self.content_review_only)
def get_actions(self, request):
actions = OrderedDict()
if request is None:
# If request is not set, it means we are just (ab)using the
# ReviewHelper for its `handler` attribute and we don't care about
# the actions.
return actions
# Conditions used below.
is_post_reviewer = acl.action_allowed(
request, amo.permissions.ADDONS_POST_REVIEW)
is_unlisted_reviewer = acl.action_allowed(
request, amo.permissions.ADDONS_REVIEW_UNLISTED)
is_content_reviewer = acl.action_allowed(
request, amo.permissions.ADDONS_CONTENT_REVIEW)
is_admin_tools_viewer = acl.action_allowed(
request, amo.permissions.REVIEWS_ADMIN)
reviewable_because_complete = self.addon.status not in (
amo.STATUS_NULL, amo.STATUS_DELETED)
regular_addon_review_is_allowed = (
not self.content_review_only and
not self.addon.needs_admin_code_review and
not self.addon.needs_admin_content_review and
not self.addon.needs_admin_theme_review
)
regular_content_review_is_allowed = (
self.content_review_only and
not self.addon.needs_admin_content_review and
(
not self.addon.needs_admin_code_review or
self.version.source
))
reviewable_because_not_reserved_for_admins_or_user_is_admin = (
is_admin_tools_viewer or
(
self.version and
(
regular_addon_review_is_allowed or
regular_content_review_is_allowed
)
))
reviewable_because_pending = (
self.version and self.version.is_unreviewed)
# Note: approval/content confirmation do not care about self.version,
# only self.addon.current_version. This allows reviewers to approve
# add-ons even when their latest submitted version is disabled for some
# reason.
was_auto_approved_and_user_can_post_review = (
self.addon.current_version and
self.addon.current_version.was_auto_approved and
is_post_reviewer and
not self.content_review_only)
is_unlisted_and_user_can_review_unlisted = (
self.version and
self.version.channel == amo.RELEASE_CHANNEL_UNLISTED and
is_unlisted_reviewer)
is_public_and_listed_and_user_can_post_review = (
self.version and
self.addon.status == amo.STATUS_APPROVED and
self.version.channel == amo.RELEASE_CHANNEL_LISTED and
is_post_reviewer)
is_valid_and_listed_and_user_can_content_review = (
self.version and
(self.addon.is_public() or self.addon.is_unreviewed()) and
self.version.channel == amo.RELEASE_CHANNEL_LISTED and
is_content_reviewer and self.content_review_only)
# Definitions for all actions.
actions['public'] = {
'method': self.handler.process_public,
'minimal': False,
'details': _('This will approve, sign, and publish this '
'version. The comments will be sent to the '
'developer.'),
'label': _('Approve'),
'available': (
reviewable_because_complete and
reviewable_because_not_reserved_for_admins_or_user_is_admin and
reviewable_because_pending and
not self.content_review_only)
}
actions['reject'] = {
'method': self.handler.process_sandbox,
'label': _('Reject'),
'details': _('This will reject this version and remove it '
'from the queue. The comments will be sent '
'to the developer.'),
'minimal': False,
'available': actions['public']['available'],
}
actions['approve_content'] = {
'method': self.handler.approve_content,
'label': _('Approve Content'),
'details': _('This records your approval of the '
'content of the latest public version, '
'without notifying the developer.'),
'minimal': False,
'comments': False,
'available': (
reviewable_because_not_reserved_for_admins_or_user_is_admin and
is_valid_and_listed_and_user_can_content_review
),
}
actions['confirm_auto_approved'] = {
'method': self.handler.confirm_auto_approved,
'label': _('Confirm Approval'),
'details': _('The latest public version of this add-on was '
'automatically approved. This records your '
'confirmation of the approval of that version, '
'without notifying the developer.'),
'minimal': True,
'comments': False,
'available': (
reviewable_because_not_reserved_for_admins_or_user_is_admin and
(was_auto_approved_and_user_can_post_review or
is_unlisted_and_user_can_review_unlisted))
}
actions['reject_multiple_versions'] = {
'method': self.handler.reject_multiple_versions,
'label': _('Reject Multiple Versions'),
'minimal': True,
'versions': True,
'details': _('This will reject the selected public '
'versions. The comments will be sent to the '
'developer.'),
'available': (
self.addon.type != amo.ADDON_STATICTHEME and
reviewable_because_not_reserved_for_admins_or_user_is_admin and
(is_public_and_listed_and_user_can_post_review or
is_valid_and_listed_and_user_can_content_review)
)
}
actions['reply'] = {
'method': self.handler.reviewer_reply,
'label': _('Reviewer reply'),
'details': _('This will send a message to the developer. '
'You will be notified when they reply.'),
'minimal': True,
'available': self.version is not None,
}
actions['super'] = {
'method': self.handler.process_super_review,
'label': _('Request super-review'),
'details': _('If you have concerns about this add-on that '
'an admin reviewer should look into, enter '
'your comments in the area below. They will '
'not be sent to the developer.'),
'minimal': True,
'available': self.version is not None,
}
actions['comment'] = {
'method': self.handler.process_comment,
'label': _('Comment'),
'details': _('Make a comment on this version. The developer '
'won\'t be able to see this.'),
'minimal': True,
'available': True,
}
return OrderedDict(
((key, action) for key, action in actions.items()
if action['available'])
)
def process(self):
action = self.handler.data.get('action', '')
if not action:
raise NotImplementedError
return self.actions[action]['method']()
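# Illustrative (hypothetical) usage sketch: a review view would typically
# validate its form, hand the cleaned data to this helper, then dispatch:
#   helper.set_data(form.cleaned_data)  # cleaned_data includes an 'action' key
#   helper.process()                    # looks up actions[action]['method'] and calls it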
class ReviewBase(object):
def __init__(self, request, addon, version, review_type,
content_review_only=False):
self.request = request
if request:
self.user = self.request.user
else:
# Use the add-ons team's go-to user "Mozilla" for the automatic
# validations.
self.user = UserProfile.objects.get(pk=settings.TASK_USER_ID)
self.addon = addon
self.version = version
self.review_type = (
('theme_%s' if addon.type == amo.ADDON_STATICTHEME
else 'extension_%s') % review_type)
self.files = self.version.unreviewed_files if self.version else []
self.content_review_only = content_review_only
def set_addon(self, **kw):
"""Alters addon and sets reviewed timestamp on version."""
self.addon.update(**kw)
self.version.update(reviewed=datetime.now())
def set_data(self, data):
self.data = data
if 'addon_files' in data:
self.files = data['addon_files']
def set_files(self, status, files, hide_disabled_file=False):
"""Change the files to be the new status."""
for file in files:
file.datestatuschanged = datetime.now()
file.reviewed = datetime.now()
if hide_disabled_file:
file.hide_disabled_file()
file.status = status
file.save()
def set_recommended(self):
try:
item = self.addon.discoveryitem
except DiscoveryItem.DoesNotExist:
return
if item.recommendable:
# These add-ons shouldn't be attempted for auto-approval anyway,
# but double-check that the cron job isn't trying to approve it.
assert not self.user.id == settings.TASK_USER_ID
self.version.update(recommendation_approved=True)
def log_action(self, action, version=None, files=None,
timestamp=None):
details = {'comments': self.data['comments'],
'reviewtype': self.review_type.split('_')[1]}
if files is None and self.files:
files = self.files
if files is not None:
details['files'] = [f.id for f in files]
if version is None and self.version:
version = self.version
if version is not None:
details['version'] = version.version
args = (self.addon, version)
else:
args = (self.addon,)
if timestamp is None:
timestamp = datetime.now()
kwargs = {'user': self.user, 'created': timestamp,
'details': details}
self.log_entry = ActivityLog.create(action, *args, **kwargs)
def notify_email(self, template, subject,
perm_setting='reviewer_reviewed', version=None):
"""Notify the authors that their addon has been reviewed."""
if version is None:
version = self.version
data = self.data.copy() if self.data else {}
data.update(self.get_context_data())
data['tested'] = ''
os, app = data.get('operating_systems'), data.get('applications')
if os and app:
data['tested'] = 'Tested on %s with %s' % (os, app)
elif os and not app:
data['tested'] = 'Tested on %s' % os
elif not os and app:
data['tested'] = 'Tested with %s' % app
subject = subject % (data['name'],
self.version.version if self.version else '')
unique_id = (self.log_entry.id if hasattr(self, 'log_entry')
else random.randrange(100000))
message = loader.get_template(
'reviewers/emails/%s.ltxt' % template).render(data)
send_activity_mail(
subject, message, version, self.addon.authors.all(),
settings.ADDONS_EMAIL, unique_id, perm_setting=perm_setting)
def get_context_data(self):
addon_url = self.addon.get_url_path(add_prefix=False)
# We need to display the name in some language that is relevant to the
# recipient(s) instead of using the reviewer's. addon.default_locale
# should work.
if self.addon.name.locale != self.addon.default_locale:
lang = to_language(self.addon.default_locale)
with translation.override(lang):
addon = Addon.unfiltered.get(pk=self.addon.pk)
else:
addon = self.addon
review_url_kw = {'addon_id': self.addon.pk}
if (self.version and
self.version.channel == amo.RELEASE_CHANNEL_UNLISTED):
review_url_kw['channel'] = 'unlisted'
dev_ver_url = reverse(
'devhub.addons.versions',
args=[self.addon.id])
else:
dev_ver_url = self.addon.get_dev_url('versions')
return {'name': addon.name,
'number': self.version.version if self.version else '',
'reviewer': self.user.reviewer_name or self.user.name,
'addon_url': absolutify(addon_url),
'dev_versions_url': absolutify(dev_ver_url),
'review_url': absolutify(reverse('reviewers.review',
kwargs=review_url_kw,
add_prefix=False)),
'comments': self.data.get('comments'),
'SITE_URL': settings.SITE_URL}
def reviewer_reply(self):
# Default to reviewer reply action.
action = amo.LOG.REVIEWER_REPLY_VERSION
if self.version:
if (self.version.channel == amo.RELEASE_CHANNEL_UNLISTED and
not self.version.reviewed):
self.version.update(reviewed=datetime.now())
if self.data.get('info_request'):
# It's an information request and not just a simple reply.
# The ActivityLog will be different...
action = amo.LOG.REQUEST_INFORMATION
# And the deadline for the info request will be created or
# updated x days in the future.
info_request_deadline_days = int(
self.data.get('info_request_deadline', 7))
info_request_deadline = (
datetime.now() + timedelta(days=info_request_deadline_days)
)
# Update or create the reviewer flags, overwriting
# self.addon.addonreviewerflags with the one we
# create/update so that we don't use an older version of it
# later when notifying. Also, since this is a new request,
# clear out the notified_about_expiring_info_request field.
self.addon.addonreviewerflags = (
AddonReviewerFlags.objects.update_or_create(
addon=self.addon, defaults={
'pending_info_request': info_request_deadline,
'notified_about_expiring_info_request': False,
}
)[0]
)
log.info(u'Sending reviewer reply for %s to authors and other '
u'recipients' % self.addon)
log_and_notify(
action, self.data['comments'], self.user, self.version,
perm_setting='individual_contact',
detail_kwargs={'reviewtype': self.review_type.split('_')[1]})
def process_comment(self):
self.log_action(amo.LOG.COMMENT_VERSION)
update_reviewed = (
self.version and
self.version.channel == amo.RELEASE_CHANNEL_UNLISTED and
not self.version.reviewed)
if update_reviewed:
self.version.update(reviewed=datetime.now())
def process_public(self):
"""Set an add-on or a version to public."""
# Safeguard to force implementation for unlisted add-ons to completely
# override this method.
assert self.version.channel == amo.RELEASE_CHANNEL_LISTED
# Safeguard to make sure this action is not used for content review
# (it should use confirm_auto_approved instead).
assert not self.content_review_only
# Sign addon.
for file_ in self.files:
sign_file(file_)
# Hold onto the status before we change it.
status = self.addon.status
# Save files first, because set_addon checks to make sure there
# is at least one public file or it won't make the addon public.
self.set_files(amo.STATUS_APPROVED, self.files)
self.set_recommended()
if self.set_addon_status:
self.set_addon(status=amo.STATUS_APPROVED)
# Increment approvals counter if we have a request (it means it's a
# human doing the review) otherwise reset it as it's an automatic
# approval.
if self.request:
AddonApprovalsCounter.increment_for_addon(addon=self.addon)
else:
AddonApprovalsCounter.reset_for_addon(addon=self.addon)
self.log_action(amo.LOG.APPROVE_VERSION)
template = u'%s_to_approved' % self.review_type
if self.review_type in ['extension_pending', 'theme_pending']:
subject = u'Mozilla Add-ons: %s %s Updated'
else:
subject = u'Mozilla Add-ons: %s %s Approved'
self.notify_email(template, subject)
self.log_public_message()
log.info(u'Sending email for %s' % (self.addon))
# Assign reviewer incentive scores.
if self.request:
ReviewerScore.award_points(
self.request.user, self.addon, status, version=self.version)
def process_sandbox(self):
"""Set an addon or a version back to sandbox."""
# Safeguard to force implementation for unlisted add-ons to completely
# override this method.
assert self.version.channel == amo.RELEASE_CHANNEL_LISTED
# Safeguard to make sure this action is not used for content review
# (it should use reject_multiple_versions instead).
assert not self.content_review_only
# Hold onto the status before we change it.
status = self.addon.status
if self.set_addon_status:
self.set_addon(status=amo.STATUS_NULL)
self.set_files(amo.STATUS_DISABLED, self.files,
hide_disabled_file=True)
self.log_action(amo.LOG.REJECT_VERSION)
template = u'%s_to_rejected' % self.review_type
subject = u'Mozilla Add-ons: %s %s didn\'t pass review'
self.notify_email(template, subject)
self.log_sandbox_message()
log.info(u'Sending email for %s' % (self.addon))
# Assign reviewer incentive scores.
if self.request:
ReviewerScore.award_points(
self.request.user, self.addon, status, version=self.version)
def process_super_review(self):
"""Mark an add-on as needing admin code, content, or theme review."""
addon_type = self.addon.type
if addon_type == amo.ADDON_STATICTHEME:
needs_admin_property = 'needs_admin_theme_review'
log_action_type = amo.LOG.REQUEST_ADMIN_REVIEW_THEME
elif self.content_review_only:
needs_admin_property = 'needs_admin_content_review'
log_action_type = amo.LOG.REQUEST_ADMIN_REVIEW_CONTENT
else:
needs_admin_property = 'needs_admin_code_review'
log_action_type = amo.LOG.REQUEST_ADMIN_REVIEW_CODE
AddonReviewerFlags.objects.update_or_create(
addon=self.addon, defaults={needs_admin_property: True})
self.log_action(log_action_type)
log.info(u'%s for %s' % (log_action_type.short, self.addon))
def approve_content(self):
"""Approve content of an add-on."""
channel = self.version.channel
version = self.addon.current_version
# Content review only action.
assert self.content_review_only
# Doesn't make sense for unlisted versions.
assert channel == amo.RELEASE_CHANNEL_LISTED
# Like confirm auto approval, the approve content action should not
# show the comment box, so override the text in case the reviewer
# switched between actions and accidentally submitted some comments from
# another action.
self.data['comments'] = ''
# When doing a content review, don't increment the approvals counter,
# just record the date of the content approval and log it.
AddonApprovalsCounter.approve_content_for_addon(addon=self.addon)
self.log_action(amo.LOG.APPROVE_CONTENT, version=version)
# Assign reviewer incentive scores.
if self.request:
is_post_review = channel == amo.RELEASE_CHANNEL_LISTED
ReviewerScore.award_points(
self.request.user, self.addon, self.addon.status,
version=version, post_review=is_post_review,
content_review=self.content_review_only)
def confirm_auto_approved(self):
"""Confirm an auto-approval decision."""
channel = self.version.channel
if channel == amo.RELEASE_CHANNEL_LISTED:
# When doing an approval in listed channel, the version we care
# about is always current_version and *not* self.version.
# This allows reviewers to confirm approval of a public add-on even
# when their latest version is disabled.
version = self.addon.current_version
else:
# For unlisted, we just use self.version.
version = self.version
# The confirm auto-approval action should not show the comment box,
# so override the text in case the reviewer switched between actions
# and accidentally submitted some comments from another action.
self.data['comments'] = ''
if channel == amo.RELEASE_CHANNEL_LISTED:
version.autoapprovalsummary.update(confirmed=True)
AddonApprovalsCounter.increment_for_addon(addon=self.addon)
self.log_action(amo.LOG.CONFIRM_AUTO_APPROVED, version=version)
# Assign reviewer incentive scores.
if self.request:
is_post_review = channel == amo.RELEASE_CHANNEL_LISTED
ReviewerScore.award_points(
self.request.user, self.addon, self.addon.status,
version=version, post_review=is_post_review,
content_review=self.content_review_only)
def reject_multiple_versions(self):
"""Reject a list of versions."""
# self.version and self.files won't point to the versions we want to
# modify in this action, so set them to None before finding the right
# versions.
status = self.addon.status
latest_version = self.version
self.version = None
self.files = None
action_id = (amo.LOG.REJECT_CONTENT if self.content_review_only
else amo.LOG.REJECT_VERSION)
timestamp = datetime.now()
for version in self.data['versions']:
files = version.files.all()
self.set_files(amo.STATUS_DISABLED, files, hide_disabled_file=True)
self.log_action(action_id, version=version, files=files,
timestamp=timestamp)
self.addon.update_status()
self.data['version_numbers'] = u', '.join(
str(v.version) for v in self.data['versions'])
# Send the email to the developer. We need to pass the latest version
# of the add-on instead of one of the versions we rejected, it will be
# used to generate a token allowing the developer to reply, and that
# only works with the latest version.
if self.addon.status != amo.STATUS_APPROVED:
template = u'reject_multiple_versions_disabled_addon'
subject = (u'Mozilla Add-ons: %s%s has been disabled on '
u'addons.mozilla.org')
else:
template = u'reject_multiple_versions'
subject = u'Mozilla Add-ons: Versions disabled for %s%s'
self.notify_email(template, subject, version=latest_version)
log.info(
u'Making %s versions %s disabled' % (
self.addon,
u', '.join(str(v.pk) for v in self.data['versions'])))
log.info(u'Sending email for %s' % (self.addon))
# Assign reviewer incentive scores.
if self.request:
ReviewerScore.award_points(
self.request.user, self.addon, status, version=latest_version,
post_review=True, content_review=self.content_review_only)
class ReviewAddon(ReviewBase):
set_addon_status = True
def log_public_message(self):
log.info(u'Making %s public' % (self.addon))
def log_sandbox_message(self):
log.info(u'Making %s disabled' % (self.addon))
class ReviewFiles(ReviewBase):
set_addon_status = False
def log_public_message(self):
log.info(u'Making %s files %s public' %
(self.addon, ', '.join([f.filename for f in self.files])))
def log_sandbox_message(self):
log.info(u'Making %s files %s disabled' %
(self.addon, ', '.join([f.filename for f in self.files])))
class ReviewUnlisted(ReviewBase):
def process_public(self):
"""Set an unlisted addon version files to public."""
assert self.version.channel == amo.RELEASE_CHANNEL_UNLISTED
# Sign addon.
for file_ in self.files:
sign_file(file_)
self.set_files(amo.STATUS_APPROVED, self.files)
template = u'unlisted_to_reviewed_auto'
subject = u'Mozilla Add-ons: %s %s signed and ready to download'
self.log_action(amo.LOG.APPROVE_VERSION)
self.notify_email(template, subject, perm_setting=None)
log.info(u'Making %s files %s public' %
(self.addon, ', '.join([f.filename for f in self.files])))
log.info(u'Sending email for %s' % (self.addon))
|
import React from "react"
import useSiteMetadata from "../static_queries/useSiteMetadata"
export default function Header(props) {
const siteMetadata = useSiteMetadata()
return (
<div className="header-container md:invisible md:h-0 lg:invisible lg:h-0">
<h1 className="header-title text-5xl logo-font text-center p-4 text-white">
{siteMetadata.title}
</h1>
<p className="header-title text-s text-center text-white">
{siteMetadata.description}
</p>
</div>
)
}
|
from django.db import models
from django.db.models import F, Max, Min, Q
from django.db.transaction import atomic
from django.contrib.contenttypes.models import ContentType
from django.core import signing
from django.dispatch import receiver
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django_comments.managers import CommentManager
from django_comments.models import Comment, CommentFlag
from django_comments.signals import comment_was_flagged
from django_comments_xtd.conf import settings
LIKEDIT_FLAG = "I liked it"
DISLIKEDIT_FLAG = "I disliked it"
def max_thread_level_for_content_type(content_type):
app_model = "%s.%s" % (content_type.app_label, content_type.model)
if app_model in settings.COMMENTS_XTD_MAX_THREAD_LEVEL_BY_APP_MODEL:
return settings.COMMENTS_XTD_MAX_THREAD_LEVEL_BY_APP_MODEL[app_model]
else:
return settings.COMMENTS_XTD_MAX_THREAD_LEVEL
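# Illustrative example (hypothetical settings): with
#   COMMENTS_XTD_MAX_THREAD_LEVEL = 3
#   COMMENTS_XTD_MAX_THREAD_LEVEL_BY_APP_MODEL = {'blog.post': 5}
# comments on blog.Post may nest up to level 5, while every other model
# falls back to the global limit of 3.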
class MaxThreadLevelExceededException(Exception):
def __init__(self, comment):
self.comment = comment
# self.max_by_app = max_thread_level_for_content_type(content_type)
def __str__(self):
return ("Max thread level reached for comment %d" % self.comment.id)
class XtdCommentManager(CommentManager):
def for_app_models(self, *args, **kwargs):
"""Return XtdComments for pairs "app.model" given in args"""
content_types = []
for app_model in args:
app, model = app_model.split(".")
content_types.append(ContentType.objects.get(app_label=app,
model=model))
return self.for_content_types(content_types, **kwargs)
def for_content_types(self, content_types, site=None):
filter_fields = {'content_type__in': content_types}
if site is not None:
filter_fields['site'] = site
qs = self.get_queryset().filter(**filter_fields)\
.reverse()
return qs
def get_queryset(self):
qs = super(XtdCommentManager, self).get_queryset()
return qs.\
select_related('user', 'content_type').\
order_by(*settings.COMMENTS_XTD_LIST_ORDER)
class XtdComment(Comment):
thread_id = models.IntegerField(default=0, db_index=True)
parent_id = models.IntegerField(default=0)
level = models.SmallIntegerField(default=0)
order = models.IntegerField(default=1, db_index=True)
followup = models.BooleanField(blank=True, default=False,
help_text=_("Notify follow-up comments"))
objects = XtdCommentManager()
def save(self, *args, **kwargs):
is_new = self.pk is None
super(Comment, self).save(*args, **kwargs)
if is_new:
if not self.parent_id:
self.parent_id = self.id
self.thread_id = self.id
else:
if max_thread_level_for_content_type(self.content_type):
with atomic():
self._calculate_thread_data()
else:
raise MaxThreadLevelExceededException(self)
kwargs["force_insert"] = False
super(Comment, self).save(*args, **kwargs)
def _calculate_thread_data(self):
# Implements the following approach:
# http://www.sqlteam.com/article/sql-for-threaded-discussion-forums
parent = XtdComment.objects.get(pk=self.parent_id)
if parent.level == max_thread_level_for_content_type(self.content_type):
raise MaxThreadLevelExceededException(self)
self.thread_id = parent.thread_id
self.level = parent.level + 1
qc_eq_thread = XtdComment.objects.filter(thread_id=parent.thread_id)
qc_ge_level = qc_eq_thread.filter(level__lte=parent.level,
order__gt=parent.order)
if qc_ge_level.count():
min_order = qc_ge_level.aggregate(Min('order'))['order__min']
XtdComment.objects.filter(thread_id=parent.thread_id,
order__gte=min_order)\
.update(order=F('order') + 1)
self.order = min_order
else:
max_order = qc_eq_thread.aggregate(Max('order'))['order__max']
self.order = max_order + 1
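# Worked example: thread c1 (level 0, order 1) with replies
#   c2 = reply to c1 (level 1, order 2), c3 = reply to c2 (level 2, order 3),
#   c4 = reply to c1 (level 1, order 4).
# A new reply to c4 finds no row at level <= 1 with order > 4, so it is
# appended with order = max_order + 1 = 5. A new reply to c2, by contrast,
# finds c4 (level 1, order 4): rows with order >= 4 are shifted down by one
# and the reply takes order 4, preserving depth-first ordering.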
def get_reply_url(self):
return reverse("comments-xtd-reply", kwargs={"cid": self.pk})
def allow_thread(self):
if self.level < max_thread_level_for_content_type(self.content_type):
return True
else:
return False
@classmethod
def tree_from_queryset(cls, queryset, with_flagging=False,
with_feedback=False, user=None):
"""Converts a XtdComment queryset into a list of nested dictionaries.
The queryset has to be ordered by thread_id, order.
Each dictionary contains two attributes::
{
'comment': the comment object itself,
'children': [list of child comment dictionaries]
}
"""
def get_user_feedback(comment, user):
d = {'likedit_users': comment.users_flagging(LIKEDIT_FLAG),
'dislikedit_users': comment.users_flagging(DISLIKEDIT_FLAG)}
if user is not None:
if user in d['likedit_users']:
d['likedit'] = True
if user in d['dislikedit_users']:
d['dislikedit'] = True
return d
def add_children(children, obj, user):
for item in children:
if item['comment'].pk == obj.parent_id:
child_dict = {'comment': obj, 'children': []}
if with_feedback:
child_dict.update(get_user_feedback(obj, user))
item['children'].append(child_dict)
return True
elif item['children']:
if add_children(item['children'], obj, user):
return True
return False
def get_new_dict(obj):
new_dict = {'comment': obj, 'children': []}
if with_feedback:
new_dict.update(get_user_feedback(obj, user))
if with_flagging:
users_flagging = obj.users_flagging(CommentFlag.SUGGEST_REMOVAL)
if user.has_perm('django_comments.can_moderate'):
new_dict.update({'flagged_count': len(users_flagging)})
new_dict.update({'flagged': user in users_flagging})
return new_dict
dic_list = []
cur_dict = None
for obj in queryset:
if cur_dict and obj.level == cur_dict['comment'].level:
dic_list.append(cur_dict)
cur_dict = None
if not cur_dict:
cur_dict = get_new_dict(obj)
continue
if obj.parent_id == cur_dict['comment'].pk:
child_dict = get_new_dict(obj)
cur_dict['children'].append(child_dict)
else:
add_children(cur_dict['children'], obj, user)
if cur_dict:
dic_list.append(cur_dict)
return dic_list
def users_flagging(self, flag):
return [obj.user for obj in self.flags.filter(flag=flag)]
@receiver(comment_was_flagged)
def unpublish_nested_comments_on_removal_flag(sender, comment, flag, **kwargs):
if flag.flag == CommentFlag.MODERATOR_DELETION:
XtdComment.objects.filter(~(Q(pk=comment.id)), parent_id=comment.id)\
.update(is_public=False)
class DummyDefaultManager:
"""
Dummy Manager to mock django's CommentForm.check_for_duplicate method.
"""
def __getattr__(self, name):
return lambda *args, **kwargs: []
def using(self, *args, **kwargs):
return self
class TmpXtdComment(dict):
"""
Temporary XtdComment to be pickled, zipped and appended to a URL.
"""
_default_manager = DummyDefaultManager()
def __getattr__(self, key):
try:
return self[key]
except KeyError:
return None
def __setattr__(self, key, value):
self[key] = value
def save(self, *args, **kwargs):
pass
def _get_pk_val(self):
if self.xtd_comment:
return self.xtd_comment._get_pk_val()
else:
content_type = "%s.%s" % self.content_type.natural_key()
return signing.dumps("%s:%s" % (content_type, self.object_pk))
def __setstate__(self, state):
ct_key = state.pop('content_type_key')
ctype = ContentType.objects.get_by_natural_key(*ct_key)
self.update(
state,
content_type=ctype,
content_object=ctype.get_object_for_this_type(
pk=state['object_pk']
)
)
def __reduce__(self):
state = {k: v for k, v in self.items() if k != 'content_object'}
ct = state.pop('content_type')
state['content_type_key'] = ct.natural_key()
return (TmpXtdComment, (), state)
# ----------------------------------------------------------------------
class BlackListedDomain(models.Model):
"""
A blacklisted domain from which comments should be discarded.
Automatically populated with a small number of spam domains
gathered from http://www.joewein.net/spam/blacklist.htm
You can download a recent version of the list for free and subscribe
to get notified of changes. Changes can be fetched with rsync for a
small fee (check their conditions, or use any other spam filter).
"""
domain = models.CharField(max_length=200, db_index=True)
def __str__(self):
return self.domain
class Meta:
ordering = ('domain',)
|
/*
* Copyright 2017 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef GrProcessorSet_DEFINED
#define GrProcessorSet_DEFINED
#include "include/private/SkTemplates.h"
#include "src/gpu/GrFragmentProcessor.h"
#include "src/gpu/GrPaint.h"
#include "src/gpu/GrProcessorAnalysis.h"
#include "src/gpu/GrXferProcessor.h"
struct GrUserStencilSettings;
class GrAppliedClip;
class GrXPFactory;
class GrProcessorSet {
private:
// Arbitrary constructor arg for empty set and analysis
enum class Empty { kEmpty };
public:
GrProcessorSet(GrPaint&&);
GrProcessorSet(SkBlendMode);
GrProcessorSet(std::unique_ptr<GrFragmentProcessor> colorFP);
GrProcessorSet(GrProcessorSet&&);
GrProcessorSet(const GrProcessorSet&) = delete;
GrProcessorSet& operator=(const GrProcessorSet&) = delete;
~GrProcessorSet();
bool hasColorFragmentProcessor() const { return fColorFragmentProcessor != nullptr; }
bool hasCoverageFragmentProcessor() const { return fCoverageFragmentProcessor != nullptr; }
const GrFragmentProcessor* colorFragmentProcessor() const {
return fColorFragmentProcessor.get();
}
const GrFragmentProcessor* coverageFragmentProcessor() const {
return fCoverageFragmentProcessor.get();
}
const GrXferProcessor* xferProcessor() const {
SkASSERT(this->isFinalized());
return fXP.fProcessor;
}
sk_sp<const GrXferProcessor> refXferProcessor() const {
SkASSERT(this->isFinalized());
return sk_ref_sp(fXP.fProcessor);
}
std::unique_ptr<GrFragmentProcessor> detachColorFragmentProcessor() {
return std::move(fColorFragmentProcessor);
}
std::unique_ptr<GrFragmentProcessor> detachCoverageFragmentProcessor() {
return std::move(fCoverageFragmentProcessor);
}
/** Comparisons are only legal on finalized processor sets. */
bool operator==(const GrProcessorSet& that) const;
bool operator!=(const GrProcessorSet& that) const { return !(*this == that); }
/**
* This is used to report results of processor analysis when a processor set is finalized (see
* below).
*/
class Analysis {
public:
Analysis(const Analysis&) = default;
Analysis() { *reinterpret_cast<uint32_t*>(this) = 0; }
bool isInitialized() const { return fIsInitialized; }
bool usesLocalCoords() const { return fUsesLocalCoords; }
bool requiresDstTexture() const { return fRequiresDstTexture; }
bool requiresNonOverlappingDraws() const { return fRequiresNonOverlappingDraws; }
bool isCompatibleWithCoverageAsAlpha() const { return fCompatibleWithCoverageAsAlpha; }
// Indicates whether all color fragment processors were eliminated in the analysis.
bool hasColorFragmentProcessor() const { return fHasColorFragmentProcessor; }
bool inputColorIsIgnored() const { return fInputColorType == kIgnored_InputColorType; }
bool inputColorIsOverridden() const { return fInputColorType == kOverridden_InputColorType; }
bool usesNonCoherentHWBlending() const { return fUsesNonCoherentHWBlending; }
private:
constexpr Analysis(Empty)
: fUsesLocalCoords(false),
fCompatibleWithCoverageAsAlpha(true),
fRequiresDstTexture(false),
fRequiresNonOverlappingDraws(false),
fHasColorFragmentProcessor(false),
fIsInitialized(true),
fUsesNonCoherentHWBlending(false),
fInputColorType(kOriginal_InputColorType) {}
enum InputColorType : uint32_t {
kOriginal_InputColorType,
kOverridden_InputColorType,
kIgnored_InputColorType
};
// MSVS 2015 won't pack different underlying types
using PackedBool = uint32_t;
using PackedInputColorType = uint32_t;
PackedBool fUsesLocalCoords : 1;
PackedBool fCompatibleWithCoverageAsAlpha : 1;
PackedBool fRequiresDstTexture : 1;
PackedBool fRequiresNonOverlappingDraws : 1;
PackedBool fHasColorFragmentProcessor : 1;
PackedBool fIsInitialized : 1;
PackedBool fUsesNonCoherentHWBlending : 1;
PackedInputColorType fInputColorType : 2;
friend class GrProcessorSet;
};
static_assert(sizeof(Analysis) <= sizeof(uint32_t));
/**
* This analyzes the processors given an op's input color and coverage as well as a clip. The
* state of the processor set may change to an equivalent but more optimal set of processors.
* This new state requires that the caller respect the returned 'inputColorOverride'. This is
* indicated by the returned Analysis's inputColorIsOverridden(). 'inputColorOverride' will not
* be written if the analysis does not override the input color.
*
* This must be called before the processor set is used to construct a GrPipeline and may only
* be called once.
*
* This also puts the processors in "pending execution" state and must be called when an op
* that owns a processor set is recorded to ensure pending reads and writes are propagated to
* resources referred to by the processors. Otherwise, data hazards may occur.
*/
Analysis finalize(
const GrProcessorAnalysisColor&, const GrProcessorAnalysisCoverage, const GrAppliedClip*,
const GrUserStencilSettings*, bool hasMixedSampledCoverage, const GrCaps&, GrClampType,
SkPMColor4f* inputColorOverride);
bool isFinalized() const { return SkToBool(kFinalized_Flag & fFlags); }
/** These are valid only for non-LCD coverage. */
static const GrProcessorSet& EmptySet();
static GrProcessorSet MakeEmptySet();
static constexpr Analysis EmptySetAnalysis() { return Analysis(Empty::kEmpty); }
#if GR_TEST_UTILS
SkString dumpProcessors() const;
#endif
void visitProxies(const GrOp::VisitProxyFunc& func) const;
private:
GrProcessorSet(Empty) : fXP((const GrXferProcessor*)nullptr), fFlags(kFinalized_Flag) {}
int numFragmentProcessors() const {
return (fColorFragmentProcessor ? 1 : 0) + (fCoverageFragmentProcessor ? 1 : 0);
}
enum Flags : uint16_t { kFinalized_Flag = 0x1 };
union XP {
XP(const GrXPFactory* factory) : fFactory(factory) {}
XP(const GrXferProcessor* processor) : fProcessor(processor) {}
explicit XP(XP&& that) : fProcessor(that.fProcessor) {
SkASSERT(fProcessor == that.fProcessor);
that.fProcessor = nullptr;
}
const GrXPFactory* fFactory;
const GrXferProcessor* fProcessor;
};
const GrXPFactory* xpFactory() const {
SkASSERT(!this->isFinalized());
return fXP.fFactory;
}
std::unique_ptr<GrFragmentProcessor> fColorFragmentProcessor;
std::unique_ptr<GrFragmentProcessor> fCoverageFragmentProcessor;
XP fXP;
uint8_t fFlags = 0;
};
#endif
|
from typing import List
from ..shared import *
DEBUG = True
ALLOWED_HOSTS: List[str] = []
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'b=c(untxoz5s!9sudc9u!)b%(w=029(0d2pzodl04m(3x35e=l'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'app',
'USER': 'localroot',
'PASSWORD': 'localrootpass',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
INSTALLED_APPS += ['drf_yasg',]
CORS_ORIGIN_ALLOW_ALL = True
|
#pragma once
#include "MreshEngine/Events/Event.h"
#include "MreshEngine/Core/Input.h"
namespace MreshEngine
{
class MouseMovedEvent : public Event
{
public:
MouseMovedEvent(float x, float y)
: m_MouseX(x), m_MouseY(y) {}
inline float GetX() const { return m_MouseX; }
inline float GetY() const { return m_MouseY; }
std::string ToString() const override
{
std::stringstream ss;
ss << "MouseMovedEvent: " << m_MouseX << ", " << m_MouseY;
return ss.str();
}
EVENT_CLASS_TYPE(MouseMoved)
EVENT_CLASS_CATEGORY(EventCategoryMouse | EventCategoryInput)
private:
float m_MouseX, m_MouseY;
};
class MouseScrolledEvent : public Event
{
public:
MouseScrolledEvent(float xOffset, float yOffset)
: m_XOffset(xOffset), m_YOffset(yOffset) {}
inline float GetXOffset() const { return m_XOffset; }
inline float GetYOffset() const { return m_YOffset; }
std::string ToString() const override
{
std::stringstream ss;
ss << "MouseScrolledEvent: " << GetXOffset() << ", " << GetYOffset();
return ss.str();
}
EVENT_CLASS_TYPE(MouseScrolled)
EVENT_CLASS_CATEGORY(EventCategoryMouse | EventCategoryInput)
private:
float m_XOffset, m_YOffset;
};
class MouseButtonEvent : public Event
{
public:
inline MouseCode GetMouseButton() const { return m_Button; }
EVENT_CLASS_CATEGORY(EventCategoryMouse | EventCategoryInput)
protected:
MouseButtonEvent(MouseCode button)
: m_Button(button) {}
MouseCode m_Button;
};
class MouseButtonPressedEvent : public MouseButtonEvent
{
public:
MouseButtonPressedEvent(MouseCode button)
: MouseButtonEvent(button) {}
std::string ToString() const override
{
std::stringstream ss;
ss << "MouseButtonPressedEvent: " << m_Button;
return ss.str();
}
EVENT_CLASS_TYPE(MouseButtonPressed)
};
class MouseButtonReleasedEvent : public MouseButtonEvent
{
public:
MouseButtonReleasedEvent(MouseCode button)
: MouseButtonEvent(button) {}
std::string ToString() const override
{
std::stringstream ss;
ss << "MouseButtonReleasedEvent: " << m_Button;
return ss.str();
}
EVENT_CLASS_TYPE(MouseButtonReleased)
};
} |
import React from 'react'
import styled, { keyframes } from 'styled-components'
import { H1, MediumText } from '@styles/TextStyles'
import { themes } from '@styles/ColorStyles'
import PurchaseButton from '@components/buttons/PurchaseButton'
import MockupAnimation from '@components/animations/mockupAnimation'
import WaveBackground from '@components/backgrounds/WaveBackground'
export default function HeroSection() {
return (
<Wrapper>
<WaveBackground />
<ContentWrapper>
<TextWrapper>
<Title>
Design
<br />
and code <span>React</span> apps
</Title>
<Description>
Don't skip design. Learn design and code by building real apps with
React and Swift. Complete courses about the best tools.
</Description>
<PurchaseButton
title="Start Learning"
subtitle="120+ hours of video"
/>
</TextWrapper>
<MockupAnimation />
</ContentWrapper>
</Wrapper>
)
}
const animation = keyframes`
from {
opacity: 0;
transform: translateY(-10px);
filter: blur(10px);
}
to {
opacity: 1;
transform: translateY(0);
filter: blur(0);
}
`
const Wrapper = styled.section`
overflow: hidden;
`
const ContentWrapper = styled.div`
max-width: 1234px;
margin: 0 auto;
padding: 200px 32px 200px 56px;
display: grid;
grid-template-columns: auto 1fr;
gap: 64px;
@media (max-width: 720px) {
grid-template-columns: auto;
padding: 160px 40px 240px 40px;
}
@media (max-width: 450px) {
padding: 160px 24px 240px 24px;
}
`
const TextWrapper = styled.div`
max-width: 360px;
display: grid;
gap: 30px;
& > * {
opacity: 0;
animation: ${animation} 1.6s forwards;
:nth-child(1) {
animation-delay: 0s;
}
:nth-child(2) {
animation-delay: 0.2s;
}
:nth-child(3) {
animation-delay: 0.4s;
}
}
`
const Title = styled(H1)`
color: ${themes.dark.text1};
background: linear-gradient(180deg, #730040 0%, #301cbe 100%);
-webkit-background-clip: text;
background-clip: text;
-webkit-text-fill-color: transparent;
color: transparent;
span {
background: linear-gradient(180deg, #ffd7ff 0%, #ffb6ff 100%);
-webkit-background-clip: text;
background-clip: text;
-webkit-text-fill-color: transparent;
color: transparent;
}
`
const Description = styled(MediumText)``
|
# -*- coding: utf-8 -*-
# Copyright 2018-2021 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Tomas Javurek <[email protected]>, 2018-2021
# - Vincent Garonne <[email protected]>, 2018
# - Joaquín Bogado <[email protected]>, 2018
# - Nicolo Magini <[email protected]>, 2018-2019
# - Tobias Wegner <[email protected]>, 2018-2019
# - Martin Barisits <[email protected]>, 2018-2021
# - Hannes Hansen <[email protected]>, 2018-2019
# - David Cameron <[email protected]>, 2019
# - Gabriele Fronze' <[email protected]>, 2019
# - Brandon White <[email protected]>, 2019
# - Jaroslav Guenther <[email protected]>, 2019
# - Andrew Lister <[email protected]>, 2019
# - Eli Chadwick <[email protected]>, 2020
# - Benedikt Ziemons <[email protected]>, 2020
# - Thomas Beermann <[email protected]>, 2021
# - Radu Carpa <[email protected]>, 2021
# - Rakshita Varadarajan <[email protected]>, 2021
# - David Población Criado <[email protected]>, 2021
# - Cedric Serfon <[email protected]>, 2021
from __future__ import division
import copy
import itertools
import logging
import os
import random
import shutil
import signal
import time
try:
from Queue import Queue, Empty, deque
except ImportError:
from queue import Queue, Empty, deque
from threading import Thread
from rucio.client.client import Client
from rucio.common.config import config_get
from rucio.common.exception import (InputValidationError, NoFilesDownloaded, NotAllFilesDownloaded, RucioException)
from rucio.common.didtype import DIDType
from rucio.common.pcache import Pcache
from rucio.common.utils import adler32, detect_client_location, generate_uuid, parse_replicas_from_string, \
send_trace, sizefmt, execute, parse_replicas_from_file, extract_scope
from rucio.common.utils import GLOBALLY_SUPPORTED_CHECKSUMS, CHECKSUM_ALGO_DICT, PREFERRED_CHECKSUM
from rucio.rse import rsemanager as rsemgr
from rucio import version
class BaseExtractionTool:
def __init__(self, program_name, useability_check_args, extract_args, logger=logging.log):
"""
Initialises an extraction tool object
:param program_name: the name of the archive extraction program, e.g., unzip
:param useability_check_args: the arguments of the extraction program to test if it's installed, e.g., --version
:param extract_args: the arguments that will be passed to the program for extraction
:param logger: optional decorated logging.log object that can be passed from the calling daemon or client.
"""
self.program_name = program_name
self.useability_check_args = useability_check_args
self.extract_args = extract_args
self.logger = logger
self.is_useable_result = None
def is_useable(self):
"""
Checks if the extraction tool is installed and usable
:returns: True if it is usable otherwise False
"""
if self.is_useable_result is not None:
return self.is_useable_result
self.is_useable_result = False
cmd = '%s %s' % (self.program_name, self.useability_check_args)
try:
exitcode, out, err = execute(cmd)
exitcode = int(exitcode)
self.logger(logging.DEBUG, '"%s" returned with exitcode %d' % (cmd, exitcode))
self.is_useable_result = (exitcode == 0)
except Exception as error:
self.logger(logging.DEBUG, 'Failed to execute: "%s"' % cmd)
self.logger(logging.DEBUG, error)
return self.is_useable_result
def try_extraction(self, archive_file_path, file_to_extract, dest_dir_path):
"""
Calls the extraction program to extract a file from an archive
:param archive_file_path: path to the archive
:param file_to_extract: file name to extract from the archive
:param dest_dir_path: destination directory where the extracted file will be stored
:returns: True on success otherwise False
"""
if not self.is_useable():
return False
args_map = {'archive_file_path': archive_file_path,
'file_to_extract': file_to_extract,
'dest_dir_path': dest_dir_path}
extract_args = self.extract_args % args_map
cmd = '%s %s' % (self.program_name, extract_args)
try:
exitcode, out, err = execute(cmd)
exitcode = int(exitcode)
self.logger(logging.DEBUG, '"%s" returned with exitcode %d' % (cmd, exitcode))
return (exitcode == 0)
except Exception as error:
self.logger(logging.DEBUG, 'Failed to execute: "%s"' % cmd)
self.logger(logging.DEBUG, error)
return False
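# Illustrative (hypothetical) standalone use, mirroring the built-in tools
# registered by DownloadClient below:
#   unzip_tool = BaseExtractionTool(
#       'unzip', '-v',
#       '%(archive_file_path)s %(file_to_extract)s -d %(dest_dir_path)s')
#   if unzip_tool.is_useable():
#       unzip_tool.try_extraction('/tmp/archive.zip', 'data.txt', '/tmp/out')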
class DownloadClient:
def __init__(self, client=None, logger=None, tracing=True, check_admin=False, check_pcache=False):
"""
Initialises the basic settings for a DownloadClient object
:param client: Optional: rucio.client.client.Client object. If None, a new object will be created.
:param logger: Optional: logging.Logger object. If None, the default logger will be used.
:param tracing: Optional: if False, no traces will be sent.
:param check_admin: Optional: if True, checks whether the account has the admin attribute and, if so, allows tape downloads.
:param check_pcache: Optional: if True, the local pcache is consulted before downloading files.
"""
self.check_pcache = check_pcache
if not logger:
self.logger = logging.log
else:
self.logger = logger.log
self.tracing = tracing
if not self.tracing:
self.logger(logging.DEBUG, 'Tracing is turned off.')
self.is_human_readable = True
self.client = client if client else Client()
# if token should be used, use only JWT tokens
self.auth_token = self.client.auth_token if len(self.client.auth_token.split(".")) == 3 else None
self.client_location = detect_client_location()
self.is_tape_excluded = True
self.is_admin = False
if check_admin:
account_attributes = list(self.client.list_account_attributes(self.client.account))
for attr in account_attributes[0]:
if attr['key'] == 'admin':
self.is_admin = attr['value'] is True
break
if self.is_admin:
self.is_tape_excluded = False
self.logger(logging.DEBUG, 'Admin mode enabled')
self.trace_tpl = {}
self.trace_tpl['hostname'] = self.client_location['fqdn']
self.trace_tpl['localSite'] = self.client_location['site']
self.trace_tpl['account'] = self.client.account
if self.client.vo != 'def':
self.trace_tpl['vo'] = self.client.vo
self.trace_tpl['eventType'] = 'download'
self.trace_tpl['eventVersion'] = 'api_%s' % version.RUCIO_VERSION[0]
self.use_cea_threshold = 10
self.extraction_tools = []
# unzip <archive_file_path> <did_name> -d <dest_dir_path>
extract_args = '%(archive_file_path)s %(file_to_extract)s -d %(dest_dir_path)s'
self.extraction_tools.append(BaseExtractionTool('unzip', '-v', extract_args, logger=self.logger))
# tar -C <dest_dir_path> -xf <archive_file_path> <did_name>
extract_args = '-C %(dest_dir_path)s -xf %(archive_file_path)s %(file_to_extract)s'
self.extraction_tools.append(BaseExtractionTool('tar', '--version', extract_args, logger=self.logger))
self.extract_scope_convention = config_get('common', 'extract_scope', False, None)
def download_pfns(self, items, num_threads=2, trace_custom_fields={}, traces_copy_out=None):
"""
Download items with a given PFN. This function can only download files, no datasets.
:param items: List of dictionaries. Each dictionary describing a file to download. Keys:
pfn - PFN string of this file
did - DID string of this file (e.g. 'scope:file.name'). Wildcards are not allowed
rse - rse name (e.g. 'CERN-PROD_DATADISK'). RSE Expressions are not allowed
base_dir - Optional: Base directory where the downloaded files will be stored. (Default: '.')
no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
adler32 - Optional: The adler32 checksum to compare the downloaded file's adler32 checksum with
md5 - Optional: The md5 checksum to compare the downloaded file's md5 checksum with
transfer_timeout - Optional: Timeout time for the download protocols. (Default: None)
:param num_threads: Suggestion of number of threads to use for the download. It will be lowered if it's too high.
:param trace_custom_fields: Custom key value pairs to send with the traces
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
clientState can be one of the following: ALREADY_DONE, DONE, FILE_NOT_FOUND, FAIL_VALIDATE, FAILED
:raises InputValidationError: if one of the input items is in the wrong format
:raises NoFilesDownloaded: if no files could be downloaded
:raises NotAllFilesDownloaded: if not all files could be downloaded
:raises RucioException: if something unexpected went wrong during the download
"""
logger = self.logger
trace_custom_fields['uuid'] = generate_uuid()
logger(logging.INFO, 'Processing %d item(s) for input' % len(items))
input_items = []
for item in items:
did_str = item.get('did')
pfn = item.get('pfn')
rse = item.get('rse')
if not did_str or not pfn or not rse:
logger(logging.DEBUG, item)
raise InputValidationError('The keys did, pfn, and rse are mandatory')
logger(logging.DEBUG, 'Preparing PFN download of %s (%s) from %s' % (did_str, pfn, rse))
if '*' in did_str:
logger(logging.DEBUG, did_str)
raise InputValidationError('Cannot use PFN download with wildcard in DID')
did_scope, did_name = self._split_did_str(did_str)
dest_dir_path = self._prepare_dest_dir(item.get('base_dir', '.'), did_scope, item.get('no_subdir'))
item['scope'] = did_scope
item['name'] = did_name
item['sources'] = [{'pfn': pfn, 'rse': rse}]
did_path_name = did_name
if self.extract_scope_convention and self.extract_scope_convention == 'belleii' and did_name.startswith('/'):
did_path_name = did_name[1:]
dest_file_path = os.path.join(dest_dir_path, did_path_name)
item['dest_file_paths'] = [dest_file_path]
item['temp_file_path'] = '%s.part' % dest_file_path
options = item.setdefault('merged_options', {})
options['ignore_checksum'] = 'adler32' not in item and 'md5' not in item
options.setdefault('transfer_timeout', item.pop('transfer_timeout', None))
input_items.append(item)
num_files_in = len(input_items)
output_items = self._download_multithreaded(input_items, num_threads, trace_custom_fields, traces_copy_out)
num_files_out = len(output_items)
if num_files_in != num_files_out:
raise RucioException('%d items were in the input queue but only %d are in the output queue' % (num_files_in, num_files_out))
return self._check_output(output_items)
def download_dids(self, items, num_threads=2, trace_custom_fields={}, traces_copy_out=None):
"""
Download items with given DIDs. This function can also download datasets and wildcarded DIDs.
:param items: List of dictionaries. Each dictionary describing an item to download. Keys:
did - DID string of this file (e.g. 'scope:file.name')
filters - Filter to select DIDs for download. Optional if DID is given
rse - Optional: rse name (e.g. 'CERN-PROD_DATADISK') or rse expression from where to download
no_resolve_archives - Optional: bool indicating whether archives should not be considered for download (Default: False)
resolve_archives - Deprecated: Use no_resolve_archives instead
force_scheme - Optional: force a specific scheme to download this item. (Default: None)
base_dir - Optional: base directory where the downloaded files will be stored. (Default: '.')
no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly chosen for download from the dataset
ignore_checksum - Optional: If true, skips the checksum validation between the downloaded file and the Rucio catalogue. (Default: False)
transfer_timeout - Optional: Timeout time for the download protocols. (Default: None)
transfer_speed_timeout - Optional: Minimum allowed transfer speed (in KBps). Ignored if transfer_timeout set. Otherwise, used to compute default timeout (Default: 500)
:param num_threads: Suggestion of number of threads to use for the download. It will be lowered if it's too high.
:param trace_custom_fields: Custom key value pairs to send with the traces.
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
:raises InputValidationError: if one of the input items is in the wrong format
:raises NoFilesDownloaded: if no files could be downloaded
:raises NotAllFilesDownloaded: if not all files could be downloaded
:raises RucioException: if something unexpected went wrong during the download
"""
logger = self.logger
trace_custom_fields['uuid'] = generate_uuid()
logger(logging.INFO, 'Processing %d item(s) for input' % len(items))
did_to_input_items, file_items_with_sources = self._resolve_and_merge_input_items(copy.deepcopy(items))
self.logger(logging.DEBUG, 'num_unmerged_items=%d; num_dids=%d; num_file_items=%d' % (len(items), len(did_to_input_items), len(file_items_with_sources)))
input_items = self._prepare_items_for_download(did_to_input_items, file_items_with_sources)
num_files_in = len(input_items)
output_items = self._download_multithreaded(input_items, num_threads, trace_custom_fields, traces_copy_out)
num_files_out = len(output_items)
if num_files_in != num_files_out:
raise RucioException('%d items were in the input queue but only %d are in the output queue' % (num_files_in, num_files_out))
return self._check_output(output_items)
def download_from_metalink_file(self, item, metalink_file_path, num_threads=2, trace_custom_fields={}, traces_copy_out=None):
"""
Download items using a given metalink file.
:param item: dictionary describing an item to download. Keys:
base_dir - Optional: base directory where the downloaded files will be stored. (Default: '.')
no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
ignore_checksum - Optional: If true, skips the checksum validation between the downloaded file and the Rucio catalogue. (Default: False)
transfer_timeout - Optional: Timeout time for the download protocols. (Default: None)
:param num_threads: Suggestion of number of threads to use for the download. It will be lowered if it's too high.
:param trace_custom_fields: Custom key value pairs to send with the traces.
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
:raises InputValidationError: if one of the input items is in the wrong format
:raises NoFilesDownloaded: if no files could be downloaded
:raises NotAllFilesDownloaded: if not all files could be downloaded
:raises RucioException: if something unexpected went wrong during the download
"""
logger = self.logger
logger(logging.INFO, 'Getting sources from metalink file')
metalinks = parse_replicas_from_file(metalink_file_path)
trace_custom_fields['uuid'] = generate_uuid()
did_to_options = {}
for metalink in metalinks:
did = DIDType(metalink['did'])
did_to_options[did] = [item]
metalink['input_dids'] = {did: {}}
input_items = self._prepare_items_for_download(did_to_options, metalinks)
num_files_in = len(input_items)
output_items = self._download_multithreaded(input_items, num_threads, trace_custom_fields, traces_copy_out)
num_files_out = len(output_items)
if num_files_in != num_files_out:
raise RucioException('%d items were in the input queue but only %d are in the output queue' % (num_files_in, num_files_out))
return self._check_output(output_items)
def _download_multithreaded(self, input_items, num_threads, trace_custom_fields={}, traces_copy_out=None):
"""
Starts an appropriate number of threads to download items from the input list.
(This function is meant to be used as class internal only)
:param input_items: list containing the input items to download
:param num_threads: suggestion of how many threads should be started
:param trace_custom_fields: Custom key value pairs to send with the traces
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:returns: list with output items as dictionaries
"""
logger = self.logger
num_files = len(input_items)
nlimit = 5
num_threads = max(1, num_threads)
num_threads = min(num_files, num_threads, nlimit)
input_queue = Queue()
output_queue = Queue()
input_queue.queue = deque(input_items)
if num_threads < 2:
logger(logging.INFO, 'Using main thread to download %d file(s)' % num_files)
self._download_worker(input_queue, output_queue, trace_custom_fields, traces_copy_out, '')
return list(output_queue.queue)
logger(logging.INFO, 'Using %d threads to download %d files' % (num_threads, num_files))
threads = []
for thread_num in range(0, num_threads):
log_prefix = 'Thread %s/%s: ' % (thread_num, num_threads)
kwargs = {'input_queue': input_queue,
'output_queue': output_queue,
'trace_custom_fields': trace_custom_fields,
'traces_copy_out': traces_copy_out,
'log_prefix': log_prefix}
try:
thread = Thread(target=self._download_worker, kwargs=kwargs)
thread.start()
threads.append(thread)
except Exception as error:
logger(logging.WARNING, 'Failed to start thread %d' % thread_num)
logger(logging.DEBUG, error)
try:
logger(logging.DEBUG, 'Waiting for threads to finish')
for thread in threads:
thread.join()
except KeyboardInterrupt:
logger(logging.WARNING, 'You pressed Ctrl+C! Exiting gracefully')
for thread in threads:
thread.kill_received = True
return list(output_queue.queue)
def _download_worker(self, input_queue, output_queue, trace_custom_fields, traces_copy_out, log_prefix):
"""
This function runs as long as there are items in the input queue,
downloads them and stores the output in the output queue.
(This function is meant to be used as class internal only)
:param input_queue: queue containing the input items to download
:param output_queue: queue where the output items will be stored
:param trace_custom_fields: Custom key value pairs to send with the traces
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:param log_prefix: string that will be put at the beginning of every log message
"""
logger = self.logger
logger(logging.DEBUG, '%sStart processing queued downloads' % log_prefix)
while True:
try:
item = input_queue.get_nowait()
except Empty:
break
try:
trace = copy.deepcopy(self.trace_tpl)
trace.update(trace_custom_fields)
download_result = self._download_item(item, trace, traces_copy_out, log_prefix)
output_queue.put(download_result)
except KeyboardInterrupt:
logger(logging.WARNING, 'You pressed Ctrl+C! Exiting gracefully')
os.killpg(os.getpgid(os.getpid()), signal.SIGINT)
break
except Exception as error:
logger(logging.ERROR, '%sFailed to download item' % log_prefix)
logger(logging.DEBUG, error)
@staticmethod
def _compute_actual_transfer_timeout(item):
"""
Merge the two options related to timeout into the value which will be used for protocol download.
:param item: dictionary that describes the item to download
:return: timeout in seconds
"""
default_transfer_timeout = 360
default_transfer_speed_timeout = 500 # KBps
# Static additive increment of the speed timeout. To include the static cost of
# establishing connections and download of small files
transfer_speed_timeout_static_increment = 60
transfer_timeout = item.get('merged_options', {}).get('transfer_timeout')
if transfer_timeout is not None:
return transfer_timeout
transfer_speed_timeout = item.get('merged_options', {}).get('transfer_speed_timeout')
bytes_ = item.get('bytes')
if not bytes_ or transfer_speed_timeout is None:
return default_transfer_timeout
if not transfer_speed_timeout > 0:
transfer_speed_timeout = default_transfer_speed_timeout
# Convert from KBytes/s to bytes/s
transfer_speed_timeout = transfer_speed_timeout * 1000
timeout = bytes_ // transfer_speed_timeout + transfer_speed_timeout_static_increment
return timeout
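# Worked example with the defaults above: a 2 GB file and
# transfer_speed_timeout=500 KBps give
#   2_000_000_000 // (500 * 1000) + 60 = 4060 seconds.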
def _download_item(self, item, trace, traces_copy_out, log_prefix=''):
"""
Downloads the given item and sends traces for success/failure.
(This function is meant to be used as class internal only)
:param item: dictionary that describes the item to download
:param trace: dictionary representing a pattern of trace that will be send
:param traces_copy_out: reference to an external list, where the traces should be uploaded
:param log_prefix: string that will be put at the beginning of every log message
:returns: dictionary with all attributes from the input item and a clientState attribute
"""
logger = self.logger
pcache = Pcache() if self.check_pcache and len(item.get('archive_items', [])) == 0 else None
did_scope = item['scope']
did_name = item['name']
did_str = '%s:%s' % (did_scope, did_name)
logger(logging.INFO, '%sPreparing download of %s' % (log_prefix, did_str))
trace['scope'] = did_scope
trace['filename'] = did_name
trace.setdefault('datasetScope', item.get('dataset_scope', ''))
trace.setdefault('dataset', item.get('dataset_name', ''))
trace.setdefault('filesize', item.get('bytes'))
trace.setdefault('clientState', 'PROCESSING')
trace.setdefault('stateReason', 'UNKNOWN')
dest_file_paths = item['dest_file_paths']
# appending trace to list reference, if the reference exists
if traces_copy_out is not None:
traces_copy_out.append(trace)
# if file already exists make sure it exists at all destination paths, set state, send trace, and return
for dest_file_path in dest_file_paths:
if os.path.isfile(dest_file_path):
if not item.get('merged_options', {}).get('ignore_checksum', False):
verified, _, _ = _verify_checksum(item, dest_file_path)
if not verified:
logger(logging.INFO, '%sFile with same name exists locally, but checksum mismatches: %s' % (log_prefix, did_str))
continue
logger(logging.INFO, '%sFile exists already locally: %s' % (log_prefix, did_str))
for missing_file_path in dest_file_paths:
if not os.path.isfile(missing_file_path):
logger(logging.DEBUG, "copying '%s' to '%s'" % (dest_file_path, missing_file_path))
shutil.copy2(dest_file_path, missing_file_path)
item['clientState'] = 'ALREADY_DONE'
trace['transferStart'] = time.time()
trace['transferEnd'] = time.time()
trace['clientState'] = 'ALREADY_DONE'
send_trace(trace, self.client.host, self.client.user_agent)
return item
# check if file has replicas
sources = item.get('sources')
if not sources or not len(sources):
logger(logging.WARNING, '%sNo available source found for file: %s' % (log_prefix, did_str))
item['clientState'] = 'FILE_NOT_FOUND'
trace['clientState'] = 'FILE_NOT_FOUND'
trace['stateReason'] = 'No available sources'
self._send_trace(trace)
return item
# checking Pcache
storage_prefix = None
if pcache:
# checking only the first replica is enough
pfn = sources[0]['pfn']
rse_name = sources[0]['rse']
# protocols are needed to extract deterministic part of the pfn
scheme = None
prots = self.client.get_protocols(rse_name)
for prot in prots:
if prot['scheme'] in pfn and prot['prefix'] in pfn:
scheme = prot['scheme']
storage_prefix = prot['prefix']
# proceed with the actual check
logger(logging.INFO, 'Checking whether %s is in pcache' % dest_file_path)
pcache_state = None
hardlink_state = None
try:
pcache_state, hardlink_state = pcache.check_and_link(src=pfn, storage_root=storage_prefix, dst=dest_file_path)
except Exception as e:
logger(logging.WARNING, 'Pcache failure: %s' % str(e))
# if file found in pcache, send trace and return
if pcache_state == 0 and hardlink_state == 1:
logger(logging.INFO, 'File found in pcache.')
item['clientState'] = 'FOUND_IN_PCACHE'
trace['transferStart'] = time.time()
trace['transferEnd'] = time.time()
trace['clientState'] = 'FOUND_IN_PCACHE'
self._send_trace(trace)
return item
else:
logger(logging.INFO, 'File not found in pcache.')
# try different PFNs until one succeeds
temp_file_path = item['temp_file_path']
success = False
i = 0
while not success and i < len(sources):
source = sources[i]
i += 1
pfn = source['pfn']
rse_name = source['rse']
scheme = pfn.split(':')[0]
try:
rse = rsemgr.get_rse_info(rse_name, vo=self.client.vo)
except RucioException as error:
logger(logging.WARNING, '%sCould not get info of RSE %s: %s' % (log_prefix, rse_name, error))
trace['stateReason'] = str(error)
continue
trace['remoteSite'] = rse_name
trace['clientState'] = 'DOWNLOAD_ATTEMPT'
trace['protocol'] = scheme
transfer_timeout = self._compute_actual_transfer_timeout(item)
timeout_log_string = ""
if transfer_timeout:
timeout_log_string = " and timeout of %ds" % transfer_timeout
logger(logging.INFO, '%sTrying to download with %s%s from %s: %s ' % (log_prefix, scheme, timeout_log_string, rse_name, did_str))
try:
protocol = rsemgr.create_protocol(rse, operation='read', scheme=scheme, auth_token=self.auth_token, logger=logger)
protocol.connect()
except Exception as error:
logger(logging.WARNING, '%sFailed to create protocol for PFN: %s' % (log_prefix, pfn))
logger(logging.DEBUG, 'scheme: %s, exception: %s' % (scheme, error))
trace['stateReason'] = str(error)
continue
attempt = 0
retries = 2
# do some retries with the same PFN if the download fails
while not success and attempt < retries:
attempt += 1
item['attemptnr'] = attempt
if os.path.isfile(temp_file_path):
logger(logging.DEBUG, '%sDeleting existing temporary file: %s' % (log_prefix, temp_file_path))
os.unlink(temp_file_path)
start_time = time.time()
try:
protocol.get(pfn, temp_file_path, transfer_timeout=transfer_timeout)
success = True
except Exception as error:
logger(logging.DEBUG, error)
trace['clientState'] = str(type(error).__name__)
trace['stateReason'] = str(error)
end_time = time.time()
if success and not item.get('merged_options', {}).get('ignore_checksum', False):
verified, rucio_checksum, local_checksum = _verify_checksum(item, temp_file_path)
if not verified:
success = False
os.unlink(temp_file_path)
logger(logging.WARNING, '%sChecksum validation failed for file: %s' % (log_prefix, did_str))
logger(logging.DEBUG, 'Local checksum: %s, Rucio checksum: %s' % (local_checksum, rucio_checksum))
trace['clientState'] = 'FAIL_VALIDATE'
trace['stateReason'] = 'Checksum validation failed: Local checksum: %s, Rucio checksum: %s' % (local_checksum, rucio_checksum)
if not success:
logger(logging.WARNING, '%sDownload attempt failed. Try %s/%s' % (log_prefix, attempt, retries))
self._send_trace(trace)
protocol.close()
if not success:
logger(logging.ERROR, '%sFailed to download file %s' % (log_prefix, did_str))
item['clientState'] = 'FAILED'
return item
dest_file_path_iter = iter(dest_file_paths)
first_dest_file_path = next(dest_file_path_iter)
logger(logging.DEBUG, "renaming '%s' to '%s'" % (temp_file_path, first_dest_file_path))
os.rename(temp_file_path, first_dest_file_path)
# if the file was downloaded with success, it can be linked to pcache
if pcache:
logger(logging.INFO, 'File %s is going to be registered into pcache.' % first_dest_file_path)
try:
pcache_state, hardlink_state = pcache.check_and_link(src=pfn, storage_root=storage_prefix, local_src=first_dest_file_path)
logger(logging.INFO, 'File %s is now registered into pcache.' % first_dest_file_path)
except Exception as e:
logger(logging.WARNING, 'Failed to load file to pcache: %s' % str(e))
for cur_dest_file_path in dest_file_path_iter:
logger(logging.DEBUG, "copying '%s' to '%s'" % (first_dest_file_path, cur_dest_file_path))
shutil.copy2(first_dest_file_path, cur_dest_file_path)
trace['transferStart'] = start_time
trace['transferEnd'] = end_time
trace['clientState'] = 'DONE'
trace['stateReason'] = 'OK'
item['clientState'] = 'DONE'
self._send_trace(trace)
duration = round(end_time - start_time, 2)
size = item.get('bytes')
size_str = sizefmt(size, self.is_human_readable)
if size and duration:
rate = round((size / duration) * 1e-6, 2)
logger(logging.INFO, '%sFile %s successfully downloaded. %s in %s seconds = %s MBps' % (log_prefix, did_str, size_str, duration, rate))
else:
logger(logging.INFO, '%sFile %s successfully downloaded in %s seconds' % (log_prefix, did_str, duration))
file_items_in_archive = item.get('archive_items', [])
if len(file_items_in_archive) > 0:
logger(logging.INFO, '%sExtracting %d file(s) from %s' % (log_prefix, len(file_items_in_archive), did_name))
archive_file_path = first_dest_file_path
for file_item in file_items_in_archive:
extraction_ok = False
extract_file_name = file_item['name']
dest_file_path_iter = iter(file_item['dest_file_paths'])
first_dest_file_path = next(dest_file_path_iter)
dest_dir = os.path.dirname(first_dest_file_path)
logger(logging.DEBUG, '%sExtracting %s to %s' % (log_prefix, extract_file_name, dest_dir))
for extraction_tool in self.extraction_tools:
if extraction_tool.try_extraction(archive_file_path, extract_file_name, dest_dir):
extraction_ok = True
break
if not extraction_ok:
logger(logging.ERROR, 'Extraction of file %s from archive %s failed.' % (extract_file_name, did_name))
continue
first_dest_file_path = os.path.join(dest_dir, extract_file_name)
for cur_dest_file_path in dest_file_path_iter:
logger(logging.DEBUG, "copying '%s' to '%s'" % (first_dest_file_path, cur_dest_file_path))
shutil.copy2(first_dest_file_path, cur_dest_file_path)
if not item.get('shall_keep_archive'):
logger(logging.DEBUG, '%sDeleting archive %s' % (log_prefix, did_name))
os.remove(archive_file_path)
return item
def download_aria2c(self, items, trace_custom_fields={}, filters={}):
"""
Uses aria2c to download the items with given DIDs. This function can also download datasets and wildcarded DIDs.
It can only download files that are available via https/davs.
Aria2c needs to be installed and X509_USER_PROXY needs to be set!
:param items: List of dictionaries. Each dictionary describing an item to download. Keys:
did - DID string of this file (e.g. 'scope:file.name'). Wildcards are not allowed
rse - Optional: rse name (e.g. 'CERN-PROD_DATADISK') or rse expression from where to download
base_dir - Optional: base directory where the downloaded files will be stored. (Default: '.')
no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly chosen for download from the dataset
ignore_checksum - Optional: If true, skips the checksum validation between the downloaded file and the rucio catalogue. (Default: False)
:param trace_custom_fields: Custom key value pairs to send with the traces
:param filters: dictionary containing filter options
:returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
:raises InputValidationError: if one of the input items is in the wrong format
:raises NoFilesDownloaded: if no files could be downloaded
:raises NotAllFilesDownloaded: if not all files could be downloaded
:raises RucioException: if something went wrong during the download (e.g. aria2c could not be started)
"""
logger = self.logger
trace_custom_fields['uuid'] = generate_uuid()
rpc_secret = '%x' % (random.getrandbits(64))
rpc_auth = 'token:%s' % rpc_secret
rpcproc, aria_rpc = self._start_aria2c_rpc(rpc_secret)
for item in items:
item['force_scheme'] = ['https', 'davs']
item['no_resolve_archives'] = True
logger(logging.INFO, 'Processing %d item(s) for input' % len(items))
did_to_input_items, file_items_with_sources = self._resolve_and_merge_input_items(copy.deepcopy(items))
self.logger(logging.DEBUG, 'num_unmerged_items=%d; num_dids=%d; num_file_items=%d' % (len(items), len(did_to_input_items), len(file_items_with_sources)))
input_items = self._prepare_items_for_download(did_to_input_items, file_items_with_sources)
output_items = []
try:
output_items = self._download_items_aria2c(input_items, aria_rpc, rpc_auth, trace_custom_fields)
except Exception as error:
self.logger(logging.ERROR, 'Unknown exception during aria2c download')
self.logger(logging.DEBUG, error)
finally:
try:
aria_rpc.aria2.forceShutdown(rpc_auth)
finally:
rpcproc.terminate()
return self._check_output(output_items)
def _start_aria2c_rpc(self, rpc_secret):
"""
Starts aria2c in RPC mode as a subprocess. Also creates
the RPC proxy instance.
(This function is meant to be used as class internal only)
:param rpc_secret: the secret for the RPC proxy
:returns: a tuple with the process and the rpc proxy objects
:raises RucioException: if the process or the proxy could not be created
"""
logger = self.logger
try:
from xmlrpclib import ServerProxy as RPCServerProxy # py2
except ImportError:
from xmlrpc.client import ServerProxy as RPCServerProxy
cmd = 'aria2c '\
'--enable-rpc '\
'--certificate=$X509_USER_PROXY '\
'--private-key=$X509_USER_PROXY '\
'--ca-certificate=/etc/pki/tls/certs/CERN-bundle.pem '\
'--quiet=true '\
'--allow-overwrite=true '\
'--auto-file-renaming=false '\
'--stop-with-process=%d '\
'--rpc-secret=%s '\
'--rpc-listen-all=false '\
'--rpc-max-request-size=100M '\
'--connect-timeout=5 '\
'--rpc-listen-port=%d'
logger(logging.INFO, 'Starting aria2c rpc server...')
# trying up to 3 random ports
for attempt in range(3):
port = random.randint(1024, 65534)
logger(logging.DEBUG, 'Trying to start rpc server on port: %d' % port)
try:
to_exec = cmd % (os.getpid(), rpc_secret, port)
logger(logging.DEBUG, to_exec)
rpcproc = execute(to_exec, False)
except Exception as error:
raise RucioException('Failed to execute aria2c!', error)
# if port is in use aria should fail to start so give it some time
time.sleep(2)
# did it fail?
if rpcproc.poll() is not None:
(out, err) = rpcproc.communicate()
logger(logging.DEBUG, 'Failed to start aria2c with port: %d' % port)
logger(logging.DEBUG, 'aria2c output: %s' % out)
else:
break
if rpcproc.poll() is not None:
raise RucioException('Failed to start aria2c rpc server!')
try:
aria_rpc = RPCServerProxy('http://localhost:%d/rpc' % port)
except Exception as error:
rpcproc.kill()
raise RucioException('Failed to initialise rpc proxy!', error)
return (rpcproc, aria_rpc)
def _download_items_aria2c(self, items, aria_rpc, rpc_auth, trace_custom_fields={}):
"""
Uses aria2c to download the given items. Aria2c needs to be started
as RPC background process first and a RPC proxy is needed.
(This function is meant to be used as class internal only)
:param items: list of dictionaries containing one dict for each file to download
:param aria_rpc: RPCProxy to the aria2c process
:param rpc_auth: the rpc authentication token
:param trace_custom_fields: Custom key value pairs to send with the traces
:returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
"""
logger = self.logger
gid_to_item = {} # maps an aria2c download id (gid) to the download item
pfn_to_rse = {}
items_to_queue = [item for item in items]
# items get removed from gid_to_item when they are complete or failed
while len(gid_to_item) or len(items_to_queue):
num_queued = 0
# queue up to 100 files and then check aria's status
while (num_queued < 100) and len(items_to_queue):
item = items_to_queue.pop()
file_scope = item['scope']
file_name = item['name']
file_did_str = '%s:%s' % (file_scope, file_name)
trace = {'scope': file_scope,
'filename': file_name,
'datasetScope': item.get('dataset_scope', ''),
'dataset': item.get('dataset_name', ''),
'protocol': 'https',
'remoteSite': '',
'filesize': item.get('bytes', None),
'transferStart': time.time(),
'transferEnd': time.time()}
trace.update(self.trace_tpl)
trace.update(trace_custom_fields)
# get pfns from all replicas
pfns = []
for src in item['sources']:
pfn = src['pfn']
if pfn[0:4].lower() == 'davs':
pfn = pfn.replace('davs', 'https', 1)
pfns.append(pfn)
pfn_to_rse[pfn] = src['rse']
# does file exist and are sources available?
# workaround: only consider first dest file path for aria2c download
dest_file_path = next(iter(item['dest_file_paths']))
if os.path.isfile(dest_file_path):
logger(logging.INFO, 'File exists already locally: %s' % file_did_str)
item['clientState'] = 'ALREADY_DONE'
trace['clientState'] = 'ALREADY_DONE'
self._send_trace(trace)
elif len(pfns) == 0:
logger(logging.WARNING, 'No available source found for file: %s' % file_did_str)
item['clientState'] = 'FILE_NOT_FOUND'
trace['clientState'] = 'FILE_NOT_FOUND'
self._send_trace(trace)
else:
item['trace'] = trace
options = {'dir': os.path.dirname(dest_file_path),
'out': os.path.basename(item['temp_file_path'])}
gid = aria_rpc.aria2.addUri(rpc_auth, pfns, options)
gid_to_item[gid] = item
num_queued += 1
logger(logging.DEBUG, 'Queued file: %s' % file_did_str)
# get some statistics
aria_stat = aria_rpc.aria2.getGlobalStat(rpc_auth)
num_active = int(aria_stat['numActive'])
num_waiting = int(aria_stat['numWaiting'])
num_stopped = int(aria_stat['numStoppedTotal'])
# save start time if one of the active downloads has started
active = aria_rpc.aria2.tellActive(rpc_auth, ['gid', 'completedLength'])
for dlinfo in active:
gid = dlinfo['gid']
if int(dlinfo['completedLength']) > 0:
gid_to_item[gid].setdefault('transferStart', time.time())
stopped = aria_rpc.aria2.tellStopped(rpc_auth, -1, num_stopped, ['gid', 'status', 'files'])
for dlinfo in stopped:
gid = dlinfo['gid']
item = gid_to_item[gid]
file_scope = item['scope']
file_name = item['name']
file_did_str = '%s:%s' % (file_scope, file_name)
temp_file_path = item['temp_file_path']
# workaround: only consider first dest file path for aria2c download
dest_file_path = next(iter(item['dest_file_paths']))
# ensure we didn't miss the active state (e.g. a very fast download)
start_time = item.setdefault('transferStart', time.time())
end_time = item.setdefault('transferEnd', time.time())
# get used pfn for traces
trace = item['trace']
for uri in dlinfo['files'][0]['uris']:
if uri['status'].lower() == 'used':
trace['remoteSite'] = pfn_to_rse.get(uri['uri'], '')
trace['transferStart'] = start_time
trace['transferEnd'] = end_time
# ensure file exists
status = dlinfo.get('status', '').lower()
if status == 'complete' and os.path.isfile(temp_file_path):
# checksum check
skip_check = item.get('ignore_checksum', False)
rucio_checksum = 0 if skip_check else item.get('adler32')
local_checksum = 0 if skip_check else adler32(temp_file_path)
if str(rucio_checksum).lstrip('0') == str(local_checksum).lstrip('0'):
item['clientState'] = 'DONE'
trace['clientState'] = 'DONE'
# remove .part ending
os.rename(temp_file_path, dest_file_path)
# calculate duration
duration = round(end_time - start_time, 2)
duration = max(duration, 0.01) # protect against 0 division
size = item.get('bytes', 0)
rate = round((size / duration) * 1e-6, 2)
size_str = sizefmt(size, self.is_human_readable)
logger(logging.INFO, 'File %s successfully downloaded. %s in %s seconds = %s MBps' % (file_did_str,
size_str,
duration,
rate))
else:
os.unlink(temp_file_path)
logger(logging.WARNING, 'Checksum validation failed for file: %s' % file_did_str)
logger(logging.DEBUG, 'Local checksum: %s, Rucio checksum: %s' % (local_checksum, rucio_checksum))
item['clientState'] = 'FAIL_VALIDATE'
trace['clientState'] = 'FAIL_VALIDATE'
else:
logger(logging.ERROR, 'Failed to download file: %s' % file_did_str)
logger(logging.DEBUG, 'Aria2c status: %s' % status)
item['clientState'] = 'FAILED'
trace['clientState'] = 'DOWNLOAD_ATTEMPT'
self._send_trace(trace)
del item['trace']
aria_rpc.aria2.removeDownloadResult(rpc_auth, gid)
del gid_to_item[gid]
if len(stopped) > 0:
logger(logging.INFO, 'Active: %d, Waiting: %d, Stopped: %d' % (num_active, num_waiting, num_stopped))
return items
def _resolve_one_item_dids(self, item):
"""
Resolve scopes or wildcard DIDs to lists of full did names:
:param item: One input item
"""
dids = item.get('did')
filters = item.get('filters', {})
if filters:
filters = copy.copy(filters)
if dids is None:
self.logger(logging.DEBUG, 'Resolving DIDs by using filter options')
scope = filters.pop('scope')
for did in self.client.list_dids(scope, filters=filters, did_type='all', long=True):
yield did
return
if not isinstance(dids, list):
dids = [dids]
for did_str in dids:
scope, did_name = self._split_did_str(did_str)
filters['name'] = did_name
any_did_resolved = False
for did in self.client.list_dids(scope, filters=filters, did_type='all', long=True):
yield did
any_did_resolved = True
# Maintain compatibility with existing code, which expects non-existing DIDs to be
# passed through in order to correctly set trace state to FILE_NOT_FOUND
if not any_did_resolved and '*' not in did_name:
yield {'scope': scope, 'name': did_name}
def _resolve_and_merge_input_items(self, input_items):
"""
This function takes the input items given to download_dids etc.
and resolves the sources.
- It first performs a list_dids call to dereference any wildcards and
retrieve DID stats (size, length, type).
- Next, input items are grouped together by common list_replicas options.
For each group, a single list_replicas call is performed.
- The resolved File DIDs with sources are finally mapped back to initial
input items to be able to correctly retrieve download options
(timeout, destination directories, etc)
:param input_items: List of dictionaries. Each dictionary describing an input item
:returns: a tuple:
- a dictionary that maps the dereferenced (w/o wildcards) input DIDs to a list of input items
- and a list with a dictionary for each file DID which has to be downloaded
:raises InputValidationError: if one of the input items is in the wrong format
"""
logger = self.logger
# check mandatory options before doing any server calls
resolve_archives = False
for item in input_items:
if item.get('resolve_archives') is not None:
logger(logging.WARNING, 'resolve_archives option is deprecated and will be removed in a future release.')
item.setdefault('no_resolve_archives', not item.pop('resolve_archives'))
# If any item needs to resolve archives
if not item.get('no_resolve_archives'):
resolve_archives = True
if not item.get('did'):
if not item.get('filters', {}).get('scope'):
logger(logging.DEBUG, item)
raise InputValidationError('Item without did and filter/scope')
if resolve_archives:
# perhaps we'll need an extraction tool so check what is installed
self.extraction_tools = [tool for tool in self.extraction_tools if tool.is_useable()]
if len(self.extraction_tools) < 1:
logger(logging.WARNING, 'Archive resolution is enabled but no extraction tool is available. '
'Sources whose protocol does not support extraction will not be considered for download.')
# if excluding tapes, we need to list them first
tape_rses = []
if self.is_tape_excluded:
try:
tape_rses = [endp['rse'] for endp in self.client.list_rses(rse_expression='istape=true')]
except Exception:
logger(logging.DEBUG, 'No tapes found.')
# Matches each dereferenced DID back to a list of input items
did_to_input_items = {}
# Resolve DIDs
for item in input_items:
resolved_dids = list(self._resolve_one_item_dids(item))
if not resolved_dids:
logger(logging.WARNING, "An item didn't have any DIDs after resolving the input: %s." % item.get('did', item))
item['dids'] = resolved_dids
for did in resolved_dids:
did_to_input_items.setdefault(DIDType(did), []).append(item)
if 'length' in did and not did['length']:
did_with_size = self.client.get_did(scope=did['scope'], name=did['name'], dynamic=True)
did['length'] = did_with_size['length']
did['bytes'] = did_with_size['bytes']
# group input items by common options to reduce the number of calls to list_replicas
distinct_keys = ['rse', 'force_scheme', 'no_resolve_archives']
item_groups = []
for item in input_items:
found_compatible_group = False
if not item.get('nrandom'):
# Don't merge items if nrandom is set. Otherwise two items with the same nrandom will be merged into one
# and we'll effectively download only half of the desired replicas for each item.
for item_group in item_groups:
if all(item.get(k) == item_group[0].get(k) for k in distinct_keys):
item_group.append(item)
found_compatible_group = True
break
if not found_compatible_group:
item_groups.append([item])
# List replicas for dids
merged_items_with_sources = []
for item_group in item_groups:
# Take configuration from the first item in the group; but dids from all items
item = item_group[0]
input_dids = {DIDType(did): did
for item in item_group
for did in item.get('dids')}
# since we're using metalink we need to explicitly give all schemes
schemes = item.get('force_scheme')
if schemes:
schemes = schemes if isinstance(schemes, list) else [schemes]
logger(logging.DEBUG, 'schemes: %s' % schemes)
# RSE expression, still with tape endpoints included
rse_expression = item.get('rse')
logger(logging.DEBUG, 'rse_expression: %s' % rse_expression)
# get PFNs of files and datasets
logger(logging.DEBUG, 'num DIDs for list_replicas call: %d' % len(item['dids']))
nrandom = item.get('nrandom')
if nrandom:
logger(logging.INFO, 'Selecting %d random replicas from DID(s): %s' % (nrandom, [str(did) for did in input_dids]))
metalink_str = self.client.list_replicas([{'scope': did.scope, 'name': did.name} for did in input_dids],
schemes=schemes,
ignore_availability=False,
rse_expression=rse_expression,
client_location=self.client_location,
resolve_archives=not item.get('no_resolve_archives'),
resolve_parents=True,
nrandom=nrandom,
metalink=True)
file_items = parse_replicas_from_string(metalink_str)
logger(logging.DEBUG, 'num resolved files: %s' % len(file_items))
if not nrandom or nrandom != len(file_items):
# If list_replicas didn't resolve any file DIDs for any input did, we pass through the input DID.
# This is done to keep compatibility with later code which generates "FILE_NOT_FOUND" traces
# and output items.
# In the special case of nrandom, when serverside filtering is applied, it's "normal" for some input
# dids to be ignored as long as we got exactly nrandom file_items from the server.
for input_did in input_dids:
if not any([input_did == f['did'] or str(input_did) in f['parent_dids'] for f in file_items]):
logger(logging.ERROR, 'DID does not exist: %s' % input_did)
# TODO: store did directly as DIDType object
file_items.append({'did': str(input_did), 'adler32': None, 'md5': None, 'sources': [], 'parent_dids': set()})
# filtering out tape sources
if self.is_tape_excluded:
for file_item in file_items:
unfiltered_sources = copy.copy(file_item['sources'])
for src in unfiltered_sources:
if src in tape_rses:
file_item['sources'].remove(src)
if unfiltered_sources and not file_item['sources']:
logger(logging.WARNING, 'The requested DID {} only has replicas on tape. Direct download from tape is prohibited. '
'Please request a transfer to a non-tape endpoint.'.format(file_item['did']))
# Match the file did back to the dids which were provided to list_replicas.
# Later, this will allow to match the file back to input_items via did_to_input_items
for file_item in file_items:
file_did = DIDType(file_item['did'])
file_input_dids = {DIDType(did) for did in file_item.get('parent_dids', [])}.intersection(input_dids)
if file_did in input_dids:
file_input_dids.add(file_did)
file_item['input_dids'] = {did: input_dids[did] for did in file_input_dids}
merged_items_with_sources.extend(file_items)
return did_to_input_items, merged_items_with_sources
def _options_from_input_items(self, input_items):
"""
Best-effort generation of download options from multiple input items which resolve to the same file DID.
This is done to download each file DID only once, even if it is requested multiple times via overlapping
datasets and/or wildcard resolutions in distinct input items.
Some options can be easily merged. For example: multiple base_dir are all appended to a list. As a result,
the file is downloaded once and copied to all desired destinations.
Other options are not necessarily compatible. For example, two items requesting two different values for
download timeout. We make our best to merge the options in such cases.
"""
options = {}
for item in input_items:
base_dir = item.get('base_dir', '.')
no_subdir = item.get('no_subdir', False)
ignore_checksum = item.get('ignore_checksum', False)
new_transfer_timeout = item.get('transfer_timeout', None)
new_transfer_speed_timeout = item.get('transfer_speed_timeout', None)
options.setdefault('destinations', set()).add((base_dir, no_subdir))
# Merge some options
# The other options of this DID will be inherited from the first item that contained the DID
options['ignore_checksum'] = (options.get('ignore_checksum') or ignore_checksum)
# if one item wants to resolve archives we enable it for all items
options['resolve_archives'] = (options.get('resolve_archives') or not item.get('no_resolve_archives'))
cur_transfer_timeout = options.setdefault('transfer_timeout', None)
if cur_transfer_timeout is not None and new_transfer_timeout is not None:
options['transfer_timeout'] = max(int(cur_transfer_timeout), int(new_transfer_timeout))
elif new_transfer_timeout is not None:
options['transfer_timeout'] = int(new_transfer_timeout)
cur_transfer_speed_timeout = options.setdefault('transfer_speed_timeout', None)
if cur_transfer_speed_timeout is not None and new_transfer_speed_timeout is not None:
options['transfer_speed_timeout'] = min(float(cur_transfer_speed_timeout), float(new_transfer_speed_timeout))
elif new_transfer_speed_timeout is not None:
options['transfer_speed_timeout'] = float(new_transfer_speed_timeout)
return options
def _prepare_items_for_download(self, did_to_input_items, file_items):
"""
Optimises the number of files to download
(This function is meant to be used as class internal only)
:param did_to_input_items: dictionary that maps resolved input DIDs to input items
:param file_items: list of dictionaries. Each dictionary describes a File DID to download
:returns: list of dictionaries. Each dictionary describes an element to download
:raises InputValidationError: if the given input is not valid or incomplete
"""
logger = self.logger
# maps file item IDs (fiid) to the file item object
fiid_to_file_item = {}
# cea -> client_extract archives, to avoid confusion with archives that don't need explicit extraction
# this dict will contain all ids of cea's that definitely will be downloaded
cea_id_pure_to_fiids = {}
# this dict will contain ids of cea's that have higher-prioritised non-cea sources
cea_id_mixed_to_fiids = {}
all_dest_file_paths = set()
# get replicas for every file of the given dids
for file_item in file_items:
file_did = DIDType(file_item['did'])
input_items = list(itertools.chain.from_iterable(did_to_input_items.get(did, []) for did in file_item['input_dids']))
options = self._options_from_input_items(input_items)
file_item['scope'] = file_did.scope
file_item['name'] = file_did.name
logger(logging.DEBUG, 'Queueing file: %s' % file_did)
logger(logging.DEBUG, 'real parents: %s' % [str(did) for did in file_item['input_dids'] if did != file_did])
logger(logging.DEBUG, 'options: %s' % options)
# prepare destinations folders:
dest_file_paths = file_item.get('dest_file_paths', set())
for input_did in file_item['input_dids']:
for item in did_to_input_items[input_did]:
base_dir = item.get('base_dir', '.')
no_subdir = item.get('no_subdir', False)
file_did_path = file_did.name
if input_did != file_did:
# if datasets were given: prepare the destination paths for each dataset
if self.extract_scope_convention == 'belleii' and file_did_path.startswith('/'):
file_did_path = file_did_path.split('/')[-1]
path = os.path.join(self._prepare_dest_dir(base_dir, input_did.name, no_subdir), file_did_path)
else:
# if no datasets were given only prepare the given destination paths
if self.extract_scope_convention == 'belleii' and file_did_path.startswith('/'):
file_did_path = file_did_path[1:]
path = os.path.join(self._prepare_dest_dir(base_dir, file_did.scope, no_subdir), file_did_path)
if path in all_dest_file_paths:
raise RucioException("Multiple file items with same destination file path")
all_dest_file_paths.add(path)
dest_file_paths.add(path)
# workaround: just take any given dataset for the traces and the output
file_item.setdefault('dataset_scope', input_did.scope)
file_item.setdefault('dataset_name', input_did.name)
if not options:
continue
resolve_archives = options.get('resolve_archives')
file_item['merged_options'] = options
file_item['dest_file_paths'] = list(dest_file_paths)
file_item['temp_file_path'] = '%s.part' % file_item['dest_file_paths'][0]
# the file did str is not a unique key for this dict because multiple calls of list_replicas
# could result in the same DID multiple times. So we're using the id of the dictionary objects
fiid = id(file_item)
fiid_to_file_item[fiid] = file_item
if resolve_archives:
min_cea_priority = None
num_non_cea_sources = 0
cea_ids = []
sources = []
# go through sources and check how many (non-)cea sources there are,
# index cea sources, or remove cea sources if there is no extraction tool
for source in file_item['sources']:
is_cea = source.get('client_extract', False)
if is_cea and (len(self.extraction_tools) > 0):
priority = int(source['priority'])
if min_cea_priority is None or priority < min_cea_priority:
min_cea_priority = priority
# workaround: since we don't have the archive DID, use the part behind the last slash of the PFN
# this doesn't respect the scope of the archive DID!!!
# and we trust that client_extract==True sources don't have any parameters at the end of the PFN
cea_id = source['pfn'].split('/')
cea_id = cea_id[-1] if len(cea_id[-1]) > 0 else cea_id[-2]
cea_ids.append(cea_id)
sources.append(source)
elif not is_cea:
num_non_cea_sources += 1
sources.append(source)
else:
# no extraction tool
logger(logging.DEBUG, 'client_extract=True; ignoring source: %s' % source['pfn'])
logger(logging.DEBUG, 'Prepared sources: num_sources=%d/%d; num_non_cea_sources=%d; num_cea_ids=%d'
% (len(sources), len(file_item['sources']), num_non_cea_sources, len(cea_ids)))
file_item['sources'] = sources
# if there are no cea sources we are done for this item
if min_cea_priority is None:
continue
# decide if file item belongs to the pure or mixed map
# if no non-archive src exists or the highest prio src is an archive src we put it in the pure map
elif num_non_cea_sources == 0 or min_cea_priority == 1:
logger(logging.DEBUG, 'Adding fiid to cea pure map: '
'num_non_cea_sources=%d; min_cea_priority=%d; num_cea_sources=%d'
% (num_non_cea_sources, min_cea_priority, len(cea_ids)))
for cea_id in cea_ids:
cea_id_pure_to_fiids.setdefault(cea_id, set()).add(fiid)
file_item.setdefault('cea_ids_pure', set()).add(cea_id)
# if there are non-archive sources and archive sources we put it in the mixed map
elif len(cea_ids) > 0:
logger(logging.DEBUG, 'Adding fiid to cea mixed map: '
'num_non_cea_sources=%d; min_cea_priority=%d; num_cea_sources=%d'
% (num_non_cea_sources, min_cea_priority, len(cea_ids)))
for cea_id in cea_ids:
cea_id_mixed_to_fiids.setdefault(cea_id, set()).add(fiid)
file_item.setdefault('cea_ids_mixed', set()).add(cea_id)
# put all archives from the mixed list into the pure list if they meet
# certain conditions, e.g., an archive that is already in the pure list
for cea_id_mixed in list(cea_id_mixed_to_fiids.keys()):
fiids_mixed = cea_id_mixed_to_fiids[cea_id_mixed]
if cea_id_mixed in cea_id_pure_to_fiids:
# file from mixed list is already in a pure list
logger(logging.DEBUG, 'Mixed ID is already in cea pure map: '
'cea_id_mixed=%s; num_fiids_mixed=%d; num_cea_pure_fiids=%d'
% (cea_id_mixed, len(fiids_mixed), len(cea_id_pure_to_fiids[cea_id_mixed])))
elif len(fiids_mixed) >= self.use_cea_threshold:
# more than use_cea_threshold files are in a common archive
logger(logging.DEBUG, 'Number of needed files in cea reached threshold: '
'cea_id_mixed=%s; num_fiids_mixed=%d; threshold=%d'
% (cea_id_mixed, len(fiids_mixed), self.use_cea_threshold))
else:
# don't move from mixed list to pure list
continue
# first add cea_id to pure map so it can be removed from mixed map later
cea_id_pure_to_fiids.setdefault(cea_id_mixed, set()).update(fiids_mixed)
# now update all file_item mixed/pure maps
for fiid_mixed in list(fiids_mixed):
file_item = fiid_to_file_item[fiid_mixed]
# add cea id to file_item pure map
file_item.setdefault('cea_ids_pure', set()).add(cea_id_mixed)
# remove file item mixed map and
# remove references from all other mixed archives to file_item
for cea_id_mixed2 in file_item.pop('cea_ids_mixed'):
cea_id_mixed_to_fiids[cea_id_mixed2].remove(fiid_mixed)
# finally remove cea_id from mixed map
cea_id_mixed_to_fiids.pop(cea_id_mixed)
for file_item in file_items:
cea_ids_pure = file_item.get('cea_ids_pure', set())
cea_ids_mixed = file_item.get('cea_ids_mixed', set())
if len(cea_ids_pure) > 0:
logger(logging.DEBUG, 'Removing all non-cea sources of file %s' % file_item['did'])
file_item['sources'] = [s for s in file_item['sources'] if s.get('client_extract', False)]
elif len(cea_ids_mixed) > 0:
logger(logging.DEBUG, 'Removing all cea sources of file %s' % file_item['did'])
file_item['sources'] = [s for s in file_item['sources'] if not s.get('client_extract', False)]
# reduce the amount of archives to download by removing
# all redundant pure archives (=all files can be extracted from other archives)
for cea_id_pure in list(cea_id_pure_to_fiids.keys()):
# if all files of this archive are available in more than one archive the archive is redundant
if all(len(fiid_to_file_item[fiid_pure]['cea_ids_pure']) > 1 for fiid_pure in cea_id_pure_to_fiids[cea_id_pure]):
for fiid_pure in cea_id_pure_to_fiids[cea_id_pure]:
fiid_to_file_item[fiid_pure]['cea_ids_pure'].discard(cea_id_pure)
logger(logging.DEBUG, 'Removing redundant archive %s' % cea_id_pure)
cea_id_pure_to_fiids.pop(cea_id_pure)
# remove all archives of a file except a single one so
# that each file is assigned to exactly one pure archive
for cea_id_pure in cea_id_pure_to_fiids:
for fiid_pure in cea_id_pure_to_fiids[cea_id_pure]:
cea_ids_pure = fiid_to_file_item[fiid_pure]['cea_ids_pure']
for cea_id_pure_other in list(cea_ids_pure):
if cea_id_pure != cea_id_pure_other:
cea_id_pure_to_fiids[cea_id_pure_other].discard(fiid_pure)
cea_ids_pure.discard(cea_id_pure_other)
download_packs = []
cea_id_to_pack = {}
for file_item in file_items:
cea_ids = file_item.get('cea_ids_pure', set())
if len(cea_ids) > 0:
cea_id = next(iter(cea_ids))
pack = cea_id_to_pack.get(cea_id)
if pack is None:
scope = file_item['scope']
first_dest = next(iter(file_item['merged_options']['destinations']))
dest_path = os.path.join(self._prepare_dest_dir(first_dest[0], scope, first_dest[1]), cea_id)
pack = {'scope': scope,
'name': cea_id,
'dest_file_paths': [dest_path],
'temp_file_path': '%s.part' % dest_path,
'sources': file_item['sources'],
'merged_options': {'ignore_checksum': True}, # we currently don't have checksums for the archive
'archive_items': []
}
cea_id_to_pack[cea_id] = pack
download_packs.append(pack)
file_item.pop('sources')
pack['archive_items'].append(file_item)
else:
download_packs.append(file_item)
return download_packs
def _split_did_str(self, did_str):
"""
Splits a given DID string (e.g. 'scope1:name.file') into its scope and name part
(This function is meant to be used as class internal only)
:param did_str: the DID string that will be split
:returns: the scope and name parts of the given DID
:raises InputValidationError: if the given DID string is not valid
"""
did = did_str.split(':')
if len(did) == 2:
did_scope = did[0]
did_name = did[1]
elif len(did) == 1:
if self.extract_scope_convention == 'belleii':
scopes = [scope for scope in self.client.list_scopes()]
did_scope, did_name = extract_scope(did[0], scopes)
else:
did = did_str.split('.')
did_scope = did[0]
if did_scope == 'user' or did_scope == 'group':
did_scope = '%s.%s' % (did[0], did[1])
did_name = did_str
else:
raise InputValidationError('%s is not a valid DID. Too many colons.' % did_str)
if did_name.endswith('/'):
did_name = did_name[:-1]
return did_scope, did_name
def _prepare_dest_dir(self, base_dir, dest_dir_name, no_subdir):
"""
Builds the final destination path for a file and creates the
destination directory if it does not exist yet.
(This function is meant to be used as class internal only)
:param base_dir: base directory part
:param dest_dir_name: name of the destination directory
:param no_subdir: if no subdirectory should be created
:returns: the absolute path of the destination directory
"""
# append dest_dir_name, if subdir should be used
if self.extract_scope_convention == 'belleii' and dest_dir_name.startswith('/'):
dest_dir_name = dest_dir_name[1:]
dest_dir_path = os.path.join(os.path.abspath(base_dir), '' if no_subdir else dest_dir_name)
if not os.path.isdir(dest_dir_path):
os.makedirs(dest_dir_path)
return dest_dir_path
def _check_output(self, output_items):
"""
Checks if all files were successfully downloaded
(This function is meant to be used as class internal only)
:param output_items: list of dictionaries describing the downloaded files
:returns: output_items list
:raises NoFilesDownloaded:
:raises NotAllFilesDownloaded:
"""
success_states = ['ALREADY_DONE', 'DONE', 'FOUND_IN_PCACHE']
# failure_states = ['FILE_NOT_FOUND', 'FAIL_VALIDATE', 'FAILED']
num_successful = 0
num_failed = 0
for item in output_items:
clientState = item.get('clientState', 'FAILED')
if clientState in success_states:
num_successful += 1
else:
num_failed += 1
if num_successful == 0:
raise NoFilesDownloaded()
elif num_failed > 0:
raise NotAllFilesDownloaded()
return output_items
def _send_trace(self, trace):
"""
Checks whether sending traces is allowed and sends the trace.
:param trace: the trace
"""
if self.tracing:
send_trace(trace, self.client.trace_host, self.client.user_agent)
def _verify_checksum(item, path):
rucio_checksum = item.get(PREFERRED_CHECKSUM)
local_checksum = None
checksum_algo = CHECKSUM_ALGO_DICT.get(PREFERRED_CHECKSUM)
if rucio_checksum and checksum_algo:
local_checksum = checksum_algo(path)
return rucio_checksum == local_checksum, rucio_checksum, local_checksum
for checksum_name in GLOBALLY_SUPPORTED_CHECKSUMS:
rucio_checksum = item.get(checksum_name)
checksum_algo = CHECKSUM_ALGO_DICT.get(checksum_name)
if rucio_checksum and checksum_algo:
local_checksum = checksum_algo(path)
return rucio_checksum == local_checksum, rucio_checksum, local_checksum
return False, None, None
|
var widgets = require('@jupyter-widgets/base');
var _ = require('underscore');
var L = require('../leaflet.js');
var control = require('./Control.js');
var LeafletControlView = control.LeafletControlView;
var LeafletControlModel = control.LeafletControlModel;
L.Control.WidgetControl = L.Control.extend({
updateLayout: function(options) {
if (!this._container) { return; }
Object.keys(options).forEach((option) => {
this._container.style[option] = options[option] + 'px';
});
},
getContent: function(){
return this._content;
},
setContent: function(content){
if (!this._map) { return; }
this._content = content;
this._container.appendChild(this._content);
return this;
},
onAdd: function (map) {
this._container = L.DomUtil.create('div', 'leaflet-widgetcontrol');
L.DomEvent.disableClickPropagation(this._container);
L.DomEvent.disableScrollPropagation(this._container);
return this._container;
},
});
L.control.widgetcontrol = function (options) {
return new L.Control.WidgetControl(options);
};
var LeafletWidgetControlModel = LeafletControlModel.extend({
defaults: _.extend({}, LeafletControlModel.prototype.defaults, {
_view_name: 'LeafletWidgetControlView',
_model_name: 'LeafletWidgetControlModel',
widget: null,
max_width: null,
min_width: null,
max_height: null,
min_height: null
})
}, {
serializers: _.extend({
widget: { deserialize: widgets.unpack_models }
}, LeafletControlModel.serializers)
});
var LeafletWidgetControlView = LeafletControlView.extend({
initialize: function (parameters) {
LeafletWidgetControlView.__super__.initialize.apply(this, arguments);
this.map_view = this.options.map_view;
this.widget_view = undefined;
},
set_widget: function(widget_model){
if (this.widget_view){
this.widget_view.remove();
this.widget_view = undefined;
}
if (widget_model){
return this.create_child_view(widget_model).then((view)=>{
this.widget_view = view;
// Trigger the displayed event of the child view.
this.displayed.then(() => {
this.widget_view.trigger('displayed', this);
this.widget_view.displayed.then(() => {
this.updateLayout();
this.obj.setContent(view.el);
});
});
})
}
},
create_obj: function () {
this.obj = L.control.widgetcontrol(this.get_options());
this.set_widget(this.model.get('widget'));
},
model_events: function () {
LeafletWidgetControlView.__super__.model_events.apply(this, arguments);
this.listenTo(this.model, 'change:min_width change:min_height change:max_width change:max_height', () => {
this.updateLayout();
});
this.listenTo(this.model, 'change:widget', function(model){
this.set_widget(this.model.get('widget'));
});
},
updateLayout: function() {
this.obj.updateLayout({
maxWidth: this.model.get('max_width'),
minWidth: this.model.get('min_width'),
maxHeight: this.model.get('max_height'),
minHeight: this.model.get('min_height')
});
}
});
module.exports = {
LeafletWidgetControlView: LeafletWidgetControlView,
LeafletWidgetControlModel: LeafletWidgetControlModel,
};
|
export default function isFilesSection(url) {
const u = new URL(url);
return u.pathname.includes("/pull") && u.pathname.includes("/files");
}
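// Examples (illustrative):
// isFilesSection("https://github.com/org/repo/pull/42/files")  // true
// isFilesSection("https://github.com/org/repo/issues/42")      // false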
|
import axios from 'axios'
export function fetchTweets(params) {
return (dispatch) => {
return axios.get('/api/search', {params: params})
}
}
export function fetchUserTweets(params) {
return (dispatch) => {
return axios.get('/api/search/fetch_user_tweets', {params: params})
}
}
export function fetchUserArticles(params) {
return (dispatch) => {
return axios.get('/api/search/fetch_user_news', {params: params})
}
}
export function fetchTweetAndReplies(params) {
return (dispatch) => {
return axios.get('/api/search/fetch_tweet_and_replies', {params: params})
}
}
export function fetchArticles(params) {
return (dispatch) => {
return axios.get('/api/search/fetch_news', {params: params})
}
}
|
# Copyright 2019 Zuru Tech HK Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Trainers help reduce boilerplate code by bootstrapping model training.
The module contains a primitive interface and specific trainers that inherit from it.
.. currentmodule:: ashpy.trainers
.. rubric:: Classes
.. autosummary::
:nosignatures:
:toctree: trainers
Trainer
AdversarialTrainer
EncoderTrainer
----
.. rubric:: Modules
.. autosummary::
:nosignatures:
:toctree: trainers
:template: autosummary/submodule.rst
trainer
classifier
gan
"""
from ashpy.trainers.classifier import ClassifierTrainer
from ashpy.trainers.gan import AdversarialTrainer, EncoderTrainer
from ashpy.trainers.trainer import Trainer
__all__ = [
"Trainer",
"AdversarialTrainer",
"EncoderTrainer",
"ClassifierTrainer",
]
|
import React from 'react';
import renderer from 'react-test-renderer';
import { Provider } from 'react-redux';
import '@testing-library/jest-dom/extend-expect';
import PokemonsContainer from '../components/PokemonsContainer';
import store from '../redux/configureStore';
describe('pokemons container component', () => {
test('matches PokemonsContainer component snapshot', () => {
const tree = renderer.create(
<Provider store={store}>
<PokemonsContainer />
</Provider>,
).toJSON();
expect(tree).toMatchSnapshot();
});
});
|
"""
Title: GauGAN for conditional image generation
Author: [Soumik Rakshit](https://github.com/soumik12345), [Sayak Paul](https://twitter.com/RisingSayak)
Date created: 2021/12/26
Last modified: 2022/01/03
Description: Implementing a GauGAN for conditional image generation.
"""
"""
## Introduction
In this example, we present an implementation of the GauGAN architecture proposed in
[Semantic Image Synthesis with Spatially-Adaptive Normalization](https://arxiv.org/abs/1903.07291).
Briefly, GauGAN uses a Generative Adversarial Network (GAN) to generate realistic images
that are conditioned on cue images and segmentation maps, as shown below
([image source](https://nvlabs.github.io/SPADE/)):

The main components of a GauGAN are:
- **SPADE (aka spatially-adaptive normalization)** : The authors of GauGAN argue that the
more conventional normalization layers (such as
[Batch Normalization](https://arxiv.org/abs/1502.03167))
destroy the semantic information obtained from segmentation maps that
are provided as inputs. To address this problem, the authors introduce SPADE, a
normalization layer particularly suitable for learning affine parameters (scale and bias)
that are spatially adaptive. This is done by learning different sets of scaling and
bias parameters for each semantic label.
- **Variational encoder**: Inspired by
[Variational Autoencoders](https://arxiv.org/abs/1312.6114), GauGAN uses a
variational formulation wherein an encoder learns the mean and variance of a
normal (Gaussian) distribution from the cue images. This is where GauGAN gets its name
from. The generator of GauGAN takes as inputs the latents sampled from the Gaussian
distribution as well as the one-hot encoded semantic segmentation label maps. The cue
images act as style images that guide the generator to stylistic generation. This
variational formulation helps GauGAN achieve image diversity as well as fidelity.
- **Multi-scale patch discriminator** : Inspired by the
[PatchGAN](https://paperswithcode.com/method/patchgan) model,
GauGAN uses a discriminator that assesses a given image on a patch basis
and produces an averaged score.
As we proceed with the example, we will discuss each of the different
components in further detail.
For a thorough review of GauGAN, please refer to
[this article](https://blog.paperspace.com/nvidia-gaugan-introduction/).
We also encourage you to check out
[the official GauGAN website](https://nvlabs.github.io/SPADE/), which
has many creative applications of GauGAN. This example assumes that the reader is already
familiar with the fundamental concepts of GANs. If you need a refresher, the following
resources might be useful:
* [Chapter on GANs](https://livebook.manning.com/book/deep-learning-with-python/chapter-8)
from the Deep Learning with Python book by François Chollet.
* GAN implementations on keras.io:
* [Data efficient GANs](https://keras.io/examples/generative/gan_ada)
* [CycleGAN](https://keras.io/examples/generative/cyclegan)
* [Conditional GAN](https://keras.io/examples/generative/conditional_gan)
"""
"""
## Data collection
We will be using the
[Facades dataset](https://cmp.felk.cvut.cz/~tylecr1/facade/)
for training our GauGAN model. Let's first download it. We also install
TensorFlow Addons.
"""
"""shell
gdown https://drive.google.com/uc?id=1q4FEjQg1YSb4mPx2VdxL7LXKYu3voTMj
unzip -q facades_data.zip
pip install -qqq tensorflow_addons
"""
"""
## Imports
"""
import os
import random
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
import tensorflow as tf
import tensorflow_addons as tfa
from tensorflow import keras
from tensorflow.keras import layers
from glob import glob
from PIL import Image
"""
## Data splitting
"""
PATH = "./facades_data/"
SPLIT = 0.2
files = glob(PATH + "*.jpg")
np.random.shuffle(files)
split_index = int(len(files) * (1 - SPLIT))
train_files = files[:split_index]
val_files = files[split_index:]
print(f"Total samples: {len(files)}.")
print(f"Total training samples: {len(train_files)}.")
print(f"Total validation samples: {len(val_files)}.")
"""
## Data loader
"""
BATCH_SIZE = 4
IMG_HEIGHT = IMG_WIDTH = 256
NUM_CLASSES = 12
AUTOTUNE = tf.data.AUTOTUNE
def load(image_files, batch_size, is_train=True):
def _random_crop(
segmentation_map, image, labels, crop_size=(IMG_HEIGHT, IMG_WIDTH),
):
crop_size = tf.convert_to_tensor(crop_size)
image_shape = tf.shape(image)[:2]
margins = image_shape - crop_size
y1 = tf.random.uniform(shape=(), maxval=margins[0], dtype=tf.int32)
x1 = tf.random.uniform(shape=(), maxval=margins[1], dtype=tf.int32)
y2 = y1 + crop_size[0]
x2 = x1 + crop_size[1]
cropped_images = []
images = [segmentation_map, image, labels]
for img in images:
cropped_images.append(img[y1:y2, x1:x2])
return cropped_images
def _load_data_tf(image_file, segmentation_map_file, label_file):
image = tf.image.decode_png(tf.io.read_file(image_file), channels=3)
segmentation_map = tf.image.decode_png(
tf.io.read_file(segmentation_map_file), channels=3
)
labels = tf.image.decode_bmp(tf.io.read_file(label_file), channels=0)
labels = tf.squeeze(labels)
image = tf.cast(image, tf.float32) / 127.5 - 1
segmentation_map = tf.cast(segmentation_map, tf.float32) / 127.5 - 1
return segmentation_map, image, labels
segmentation_map_files = [
image_file.replace("images", "segmentation_map").replace("jpg", "png")
for image_file in image_files
]
label_files = [
image_file.replace("images", "segmentation_labels").replace("jpg", "bmp")
for image_file in image_files
]
dataset = tf.data.Dataset.from_tensor_slices(
(image_files, segmentation_map_files, label_files)
)
dataset = dataset.shuffle(batch_size * 10) if is_train else dataset
dataset = dataset.map(_load_data_tf, num_parallel_calls=AUTOTUNE)
dataset = dataset.map(_random_crop, num_parallel_calls=AUTOTUNE)
dataset = dataset.map(
lambda x, y, z: (x, y, tf.one_hot(z, NUM_CLASSES)), num_parallel_calls=AUTOTUNE
)
return dataset.batch(batch_size, drop_remainder=True)
train_dataset = load(train_files, batch_size=BATCH_SIZE, is_train=True)
val_dataset = load(val_files, batch_size=BATCH_SIZE, is_train=False)
"""
Now, let's visualize a few samples from the training set.
"""
sample_train_batch = next(iter(train_dataset))
print(f"Segmentation map batch shape: {sample_train_batch[0].shape}.")
print(f"Image batch shape: {sample_train_batch[1].shape}.")
print(f"One-hot encoded label map shape: {sample_train_batch[2].shape}.")
# Plot a few samples from the training set.
for segmentation_map, real_image in zip(sample_train_batch[0], sample_train_batch[1]):
fig = plt.figure(figsize=(10, 10))
fig.add_subplot(1, 2, 1).set_title("Segmentation Map")
plt.imshow((segmentation_map + 1) / 2)
fig.add_subplot(1, 2, 2).set_title("Real Image")
plt.imshow((real_image + 1) / 2)
plt.show()
"""
Note that in the rest of this example, we use a couple of figures from the
[original GauGAN paper](https://arxiv.org/abs/1903.07291) for convenience.
"""
"""
## Custom layers
In the following section, we implement the following layers:
* SPADE
* Residual block including SPADE
* Gaussian sampler
"""
"""
### Some more notes on SPADE

**SPatially-Adaptive (DE) normalization** or **SPADE** is a simple but effective layer
for synthesizing photorealistic images given an input semantic layout. Previous methods
for conditional image generation from semantic input such as
Pix2Pix ([Isola et al.](https://arxiv.org/abs/1611.07004))
or Pix2PixHD ([Wang et al.](https://arxiv.org/abs/1711.11585))
directly feed the semantic layout as input to the deep network, which is then processed
through stacks of convolution, normalization, and nonlinearity layers. This is often
suboptimal as the normalization layers have a tendency to wash away semantic information.
In SPADE, the segmentation mask is first projected onto an embedding space, and then
convolved to produce the modulation parameters `γ` and `β`. Unlike prior conditional
normalization methods, `γ` and `β` are not vectors, but tensors with spatial dimensions.
The produced `γ` and `β` are multiplied and added to the normalized activation
element-wise. As the modulation parameters are adaptive to the input segmentation mask,
SPADE is better suited for semantic image synthesis.
"""
class SPADE(layers.Layer):
def __init__(self, filters, epsilon=1e-5, **kwargs):
super().__init__(**kwargs)
self.epsilon = epsilon
self.conv = layers.Conv2D(128, 3, padding="same", activation="relu")
self.conv_gamma = layers.Conv2D(filters, 3, padding="same")
self.conv_beta = layers.Conv2D(filters, 3, padding="same")
def build(self, input_shape):
self.resize_shape = input_shape[1:3]
def call(self, input_tensor, raw_mask):
mask = tf.image.resize(raw_mask, self.resize_shape, method="nearest")
x = self.conv(mask)
gamma = self.conv_gamma(x)
beta = self.conv_beta(x)
mean, var = tf.nn.moments(input_tensor, axes=(0, 1, 2), keepdims=True)
std = tf.sqrt(var + self.epsilon)
normalized = (input_tensor - mean) / std
output = gamma * normalized + beta
return output
class ResBlock(layers.Layer):
def __init__(self, filters, **kwargs):
super().__init__(**kwargs)
self.filters = filters
def build(self, input_shape):
input_filter = input_shape[-1]
self.spade_1 = SPADE(input_filter)
self.spade_2 = SPADE(self.filters)
self.conv_1 = layers.Conv2D(self.filters, 3, padding="same")
self.conv_2 = layers.Conv2D(self.filters, 3, padding="same")
self.learned_skip = False
if self.filters != input_filter:
self.learned_skip = True
self.spade_3 = SPADE(input_filter)
self.conv_3 = layers.Conv2D(self.filters, 3, padding="same")
def call(self, input_tensor, mask):
x = self.spade_1(input_tensor, mask)
x = self.conv_1(tf.nn.leaky_relu(x, 0.2))
x = self.spade_2(x, mask)
x = self.conv_2(tf.nn.leaky_relu(x, 0.2))
skip = (
self.conv_3(tf.nn.leaky_relu(self.spade_3(input_tensor, mask), 0.2))
if self.learned_skip
else input_tensor
)
output = skip + x
return output
class GaussianSampler(layers.Layer):
def __init__(self, batch_size, latent_dim, **kwargs):
super().__init__(**kwargs)
self.batch_size = batch_size
self.latent_dim = latent_dim
def call(self, inputs):
means, variance = inputs
epsilon = tf.random.normal(
shape=(self.batch_size, self.latent_dim), mean=0.0, stddev=1.0
)
samples = means + tf.exp(0.5 * variance) * epsilon
return samples
"""
Next, we implement the downsampling block for the encoder.
"""
def downsample(
channels,
kernels,
strides=2,
apply_norm=True,
apply_activation=True,
apply_dropout=False,
):
block = keras.Sequential()
block.add(
layers.Conv2D(
channels,
kernels,
strides=strides,
padding="same",
use_bias=False,
kernel_initializer=keras.initializers.GlorotNormal(),
)
)
if apply_norm:
block.add(tfa.layers.InstanceNormalization())
if apply_activation:
block.add(layers.LeakyReLU(0.2))
if apply_dropout:
block.add(layers.Dropout(0.5))
return block
"""
The GauGAN encoder consists of a few downsampling blocks. It outputs the mean and
variance of a distribution.

"""
def build_encoder(image_shape, encoder_downsample_factor=64, latent_dim=256):
input_image = keras.Input(shape=image_shape)
x = downsample(encoder_downsample_factor, 3, apply_norm=False)(input_image)
x = downsample(2 * encoder_downsample_factor, 3)(x)
x = downsample(4 * encoder_downsample_factor, 3)(x)
x = downsample(8 * encoder_downsample_factor, 3)(x)
x = downsample(8 * encoder_downsample_factor, 3)(x)
x = layers.Flatten()(x)
mean = layers.Dense(latent_dim, name="mean")(x)
variance = layers.Dense(latent_dim, name="variance")(x)
return keras.Model(input_image, [mean, variance], name="encoder")
"""
Next, we implement the generator, which consists of the modified residual blocks and
upsampling blocks. It takes latent vectors and one-hot encoded segmentation labels, and
produces new images.

With SPADE, there is no need to feed the segmentation map to the first layer of the
generator, since the latent inputs have enough structural information about the style we
want the generator to emulate. We also discard the encoder part of the generator, which is
commonly used in prior architectures. This results in a more lightweight
generator network, which can also take a random vector as input, enabling a simple and
natural path to multi-modal synthesis.
"""
def build_generator(mask_shape, latent_dim=256):
latent = keras.Input(shape=(latent_dim))
mask = keras.Input(shape=mask_shape)
x = layers.Dense(16384)(latent)
x = layers.Reshape((4, 4, 1024))(x)
x = ResBlock(filters=1024)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = ResBlock(filters=1024)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = ResBlock(filters=1024)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = ResBlock(filters=512)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = ResBlock(filters=256)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = ResBlock(filters=128)(x, mask)
x = layers.UpSampling2D((2, 2))(x)
x = tf.nn.leaky_relu(x, 0.2)
output_image = tf.nn.tanh(layers.Conv2D(3, 4, padding="same")(x))
return keras.Model([latent, mask], output_image, name="generator")
"""
The discriminator takes a segmentation map and an image and concatenates them. It
then predicts if patches of the concatenated image are real or fake.

"""
def build_discriminator(image_shape, downsample_factor=64):
input_image_A = keras.Input(shape=image_shape, name="discriminator_image_A")
input_image_B = keras.Input(shape=image_shape, name="discriminator_image_B")
x = layers.Concatenate()([input_image_A, input_image_B])
x1 = downsample(downsample_factor, 4, apply_norm=False)(x)
x2 = downsample(2 * downsample_factor, 4)(x1)
x3 = downsample(4 * downsample_factor, 4)(x2)
x4 = downsample(8 * downsample_factor, 4, strides=1)(x3)
x5 = layers.Conv2D(1, 4)(x4)
outputs = [x1, x2, x3, x4, x5]
return keras.Model([input_image_A, input_image_B], outputs)
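"""
As a quick sanity check, the sketch below (illustrative, assuming a 256x256 RGB input)
builds the discriminator and prints the shapes of its outputs: the last output is a grid
of patch-level real/fake scores, and the earlier outputs are the intermediate feature
maps that the feature matching loss will use.
"""

disc = build_discriminator((256, 256, 3))
for out in disc.outputs:
    print(out.shape)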
"""
## Loss functions
GauGAN uses the following loss functions:
* Generator:
    * Adversarial (hinge-based) loss: the negative mean of the discriminator predictions
    on the generated images.
    * [KL divergence](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)
    for learning the mean and variance predicted by the encoder.
    * Feature matching loss: an L1 distance between the discriminator's intermediate
    feature maps for real and generated images, which aligns the feature space of the
    generated images with that of the real ones.
    * [Perceptual loss](https://arxiv.org/abs/1603.08155) to encourage the generated
    images to be perceptually similar to the real images.
* Discriminator:
    * [Hinge loss](https://en.wikipedia.org/wiki/Hinge_loss).
"""
def generator_loss(y):
return -tf.reduce_mean(y)
def kl_divergence_loss(mean, variance):
return -0.5 * tf.reduce_sum(1 + variance - tf.square(mean) - tf.exp(variance))
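"""
For reference, `kl_divergence_loss` is the closed-form KL divergence between the
encoder's Gaussian and a standard normal prior, with the encoder's `variance` output
treated as a log-variance (it is exponentiated here and in the sampler):

KL(N(mu, sigma^2) || N(0, 1)) = -0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
"""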
class FeatureMatchingLoss(keras.losses.Loss):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.mae = keras.losses.MeanAbsoluteError()
def call(self, y_true, y_pred):
loss = 0
for i in range(len(y_true) - 1):
loss += self.mae(y_true[i], y_pred[i])
return loss
class VGGFeatureMatchingLoss(keras.losses.Loss):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.encoder_layers = [
"block1_conv1",
"block2_conv1",
"block3_conv1",
"block4_conv1",
"block5_conv1",
]
self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0]
vgg = keras.applications.VGG19(include_top=False, weights="imagenet")
layer_outputs = [vgg.get_layer(x).output for x in self.encoder_layers]
self.vgg_model = keras.Model(vgg.input, layer_outputs, name="VGG")
self.mae = keras.losses.MeanAbsoluteError()
def call(self, y_true, y_pred):
y_true = keras.applications.vgg19.preprocess_input(127.5 * (y_true + 1))
y_pred = keras.applications.vgg19.preprocess_input(127.5 * (y_pred + 1))
real_features = self.vgg_model(y_true)
fake_features = self.vgg_model(y_pred)
loss = 0
for i in range(len(real_features)):
loss += self.weights[i] * self.mae(real_features[i], fake_features[i])
return loss
class DiscriminatorLoss(keras.losses.Loss):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.hinge_loss = keras.losses.Hinge()
def call(self, y, is_real):
label = 1.0 if is_real else -1.0
return self.hinge_loss(label, y)
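"""
`keras.losses.Hinge` computes `mean(maximum(1 - y_true * y_pred, 0))`, so with the +1/-1
labels above the discriminator is penalized whenever a real image scores below +1 or a
generated image scores above -1.
"""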
"""
## GAN monitor callback
Next, we implement a callback to monitor the GauGAN results while it is training.
"""
class GanMonitor(keras.callbacks.Callback):
def __init__(self, val_dataset, n_samples, epoch_interval=5):
self.val_images = next(iter(val_dataset))
self.n_samples = n_samples
self.epoch_interval = epoch_interval
def infer(self):
latent_vector = tf.random.normal(
shape=(self.model.batch_size, self.model.latent_dim), mean=0.0, stddev=2.0
)
return self.model.predict([latent_vector, self.val_images[2]])
def on_epoch_end(self, epoch, logs=None):
if epoch % self.epoch_interval == 0:
generated_images = self.infer()
for _ in range(self.n_samples):
grid_row = min(generated_images.shape[0], 3)
f, axarr = plt.subplots(grid_row, 3, figsize=(18, grid_row * 6))
for row in range(grid_row):
ax = axarr if grid_row == 1 else axarr[row]
ax[0].imshow((self.val_images[0][row] + 1) / 2)
ax[0].axis("off")
ax[0].set_title("Mask", fontsize=20)
ax[1].imshow((self.val_images[1][row] + 1) / 2)
ax[1].axis("off")
ax[1].set_title("Ground Truth", fontsize=20)
ax[2].imshow((generated_images[row] + 1) / 2)
ax[2].axis("off")
ax[2].set_title("Generated", fontsize=20)
plt.show()
"""
## Subclassed GauGAN model
Finally, we put everything together inside a subclassed model (from `tf.keras.Model`)
overriding its `train_step()` method.
"""
class GauGAN(keras.Model):
def __init__(
self,
image_size,
num_classes,
batch_size,
latent_dim,
feature_loss_coeff=10,
vgg_feature_loss_coeff=0.1,
kl_divergence_loss_coeff=0.1,
**kwargs,
):
super().__init__(**kwargs)
self.image_size = image_size
self.latent_dim = latent_dim
self.batch_size = batch_size
self.num_classes = num_classes
self.image_shape = (image_size, image_size, 3)
self.mask_shape = (image_size, image_size, num_classes)
self.feature_loss_coeff = feature_loss_coeff
self.vgg_feature_loss_coeff = vgg_feature_loss_coeff
self.kl_divergence_loss_coeff = kl_divergence_loss_coeff
self.discriminator = build_discriminator(self.image_shape)
self.generator = build_generator(self.mask_shape)
self.encoder = build_encoder(self.image_shape)
self.sampler = GaussianSampler(batch_size, latent_dim)
self.patch_size, self.combined_model = self.build_combined_generator()
self.disc_loss_tracker = tf.keras.metrics.Mean(name="disc_loss")
self.gen_loss_tracker = tf.keras.metrics.Mean(name="gen_loss")
self.feat_loss_tracker = tf.keras.metrics.Mean(name="feat_loss")
self.vgg_loss_tracker = tf.keras.metrics.Mean(name="vgg_loss")
self.kl_loss_tracker = tf.keras.metrics.Mean(name="kl_loss")
@property
def metrics(self):
return [
self.disc_loss_tracker,
self.gen_loss_tracker,
self.feat_loss_tracker,
self.vgg_loss_tracker,
self.kl_loss_tracker,
]
def build_combined_generator(self):
# This method builds a model that takes as inputs the following:
# latent vector, one-hot encoded segmentation label map, and
# a segmentation map. It then (i) generates an image with the generator,
# (ii) passes the generated images and segmentation map to the discriminator.
# Finally, the model produces the following outputs: (a) discriminator outputs,
# (b) generated image.
# We will be using this model to simplify the implementation.
self.discriminator.trainable = False
mask_input = keras.Input(shape=self.mask_shape, name="mask")
image_input = keras.Input(shape=self.image_shape, name="image")
latent_input = keras.Input(shape=(self.latent_dim), name="latent")
generated_image = self.generator([latent_input, mask_input])
discriminator_output = self.discriminator([image_input, generated_image])
patch_size = discriminator_output[-1].shape[1]
combined_model = keras.Model(
[latent_input, mask_input, image_input],
[discriminator_output, generated_image],
)
return patch_size, combined_model
def compile(self, gen_lr=1e-4, disc_lr=4e-4, **kwargs):
super().compile(**kwargs)
self.generator_optimizer = keras.optimizers.Adam(
gen_lr, beta_1=0.0, beta_2=0.999
)
self.discriminator_optimizer = keras.optimizers.Adam(
disc_lr, beta_1=0.0, beta_2=0.999
)
self.discriminator_loss = DiscriminatorLoss()
self.feature_matching_loss = FeatureMatchingLoss()
self.vgg_loss = VGGFeatureMatchingLoss()
def train_discriminator(self, latent_vector, segmentation_map, real_image, labels):
fake_images = self.generator([latent_vector, labels])
with tf.GradientTape() as gradient_tape:
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, real_image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_loss = 0.5 * (loss_fake + loss_real)
self.discriminator.trainable = True
gradients = gradient_tape.gradient(
total_loss, self.discriminator.trainable_variables
)
self.discriminator_optimizer.apply_gradients(
zip(gradients, self.discriminator.trainable_variables)
)
return total_loss
def train_generator(
self, latent_vector, segmentation_map, labels, image, mean, variance
):
# Generator learns through the signal provided by the discriminator. During
# backpropagation, we only update the generator parameters.
self.discriminator.trainable = False
with tf.GradientTape() as tape:
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
# Compute generator losses.
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_loss = g_loss + kl_loss + vgg_loss + feature_loss
gradients = tape.gradient(total_loss, self.combined_model.trainable_variables)
self.generator_optimizer.apply_gradients(
zip(gradients, self.combined_model.trainable_variables)
)
return total_loss, feature_loss, vgg_loss, kl_loss
def train_step(self, data):
segmentation_map, image, labels = data
mean, variance = self.encoder(image)
latent_vector = self.sampler([mean, variance])
discriminator_loss = self.train_discriminator(
latent_vector, segmentation_map, image, labels
)
(generator_loss, feature_loss, vgg_loss, kl_loss) = self.train_generator(
latent_vector, segmentation_map, labels, image, mean, variance
)
# Report progress.
self.disc_loss_tracker.update_state(discriminator_loss)
self.gen_loss_tracker.update_state(generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics}
return results
def test_step(self, data):
segmentation_map, image, labels = data
# Obtain the learned moments of the real image distribution.
mean, variance = self.encoder(image)
# Sample a latent from the distribution defined by the learned moments.
latent_vector = self.sampler([mean, variance])
# Generate the fake images.
fake_images = self.generator([latent_vector, labels])
# Calculate the losses.
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_discriminator_loss = 0.5 * (loss_fake + loss_real)
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_generator_loss = g_loss + kl_loss + vgg_loss + feature_loss
# Report progress.
self.disc_loss_tracker.update_state(total_discriminator_loss)
self.gen_loss_tracker.update_state(total_generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics}
return results
def call(self, inputs):
latent_vectors, labels = inputs
return self.generator([latent_vectors, labels])
"""
## GauGAN training
"""
gaugan = GauGAN(IMG_HEIGHT, NUM_CLASSES, BATCH_SIZE, latent_dim=256)
gaugan.compile()
history = gaugan.fit(
train_dataset,
validation_data=val_dataset,
epochs=15,
callbacks=[GanMonitor(val_dataset, BATCH_SIZE)],
)
def plot_history(item):
plt.plot(history.history[item], label=item)
plt.plot(history.history["val_" + item], label="val_" + item)
plt.xlabel("Epochs")
plt.ylabel(item)
plt.title("Train and Validation {} Over Epochs".format(item), fontsize=14)
plt.legend()
plt.grid()
plt.show()
plot_history("disc_loss")
plot_history("gen_loss")
plot_history("feat_loss")
plot_history("vgg_loss")
plot_history("kl_loss")
"""
## Inference
"""
val_iterator = iter(val_dataset)
for _ in range(5):
val_images = next(val_iterator)
# Sample latent from a normal distribution.
latent_vector = tf.random.normal(
shape=(gaugan.batch_size, gaugan.latent_dim), mean=0.0, stddev=2.0
)
# Generate fake images.
fake_images = gaugan.predict([latent_vector, val_images[2]])
real_images = val_images
grid_row = min(fake_images.shape[0], 3)
grid_col = 3
f, axarr = plt.subplots(grid_row, grid_col, figsize=(grid_col * 6, grid_row * 6))
for row in range(grid_row):
ax = axarr if grid_row == 1 else axarr[row]
ax[0].imshow((real_images[0][row] + 1) / 2)
ax[0].axis("off")
ax[0].set_title("Mask", fontsize=20)
ax[1].imshow((real_images[1][row] + 1) / 2)
ax[1].axis("off")
ax[1].set_title("Ground Truth", fontsize=20)
ax[2].imshow((fake_images[row] + 1) / 2)
ax[2].axis("off")
ax[2].set_title("Generated", fontsize=20)
plt.show()
"""
## Final words
* The dataset we used in this example is a small one. For even better results, we
recommend using a larger dataset. GauGAN results were demonstrated with the
[COCO-Stuff](https://github.com/nightrome/cocostuff) and
[CityScapes](https://www.cityscapes-dataset.com/) datasets.
* If you found this example interesting and exciting, you might want to check out
[our repository](https://github.com/soumik12345/tf2_gans) which we are
currently building. It will include reimplementations of popular GANs and pretrained
models. Our focus will be on readability and making the code as accessible as possible.
Our plan is to first train our implementation of GauGAN (following the code of
this example) on a bigger dataset and then make the repository public. We welcome
contributions!
* Recently GauGAN2 was also released. You can check it out
[here](https://blogs.nvidia.com/blog/2021/11/22/gaugan2-ai-art-demo/).
"""
|
// User Registration Profile model
module.exports = function(sequelize, DataTypes) {
const User_Registration_Profile = sequelize.define(
'User_Registration_Profile',
{
id: {
type: DataTypes.INTEGER,
primaryKey: true,
autoIncrement: true,
},
activation_key: {
type: DataTypes.STRING,
},
activation_expires: {
type: DataTypes.DATE,
},
reset_key: {
type: DataTypes.STRING,
defaultValue: undefined,
},
reset_expires: {
type: DataTypes.DATE,
defaultValue: undefined,
},
verification_key: {
type: DataTypes.STRING,
defaultValue: undefined,
},
verification_expires: {
type: DataTypes.DATE,
defaultValue: undefined,
},
},
{
tableName: 'user_registration_profile',
timestamps: false,
underscored: true,
}
);
return User_Registration_Profile;
};
|
import React from "react"
import { Link } from "gatsby"
import Img from "gatsby-image"
export default function brand({ brand }) {
return (
<div className="col-10 col-sm-8 col-6 col-lg-4 mx-auto my-3">
<div className="card" style={{ minHeight: "100%" }}>
<Link to="/maboutique" className="image-link">
<Img fluid={brand.image.fluid} className="card-img-top" />
</Link>
<div className="card-body text-center">
<h6>{brand.title}</h6>
<h6 className="text-yellow">A partir de {brand.price}€</h6>
<div className="">
<h6>{brand.date}</h6>
</div>
<Link className="" to="/maboutique">
<button
className="btn btn-yellow mt-3 text-capitalize"
data-item-url="https://shoeso972.netlify.com/"
>
Juste pour voir
</button>
</Link>
</div>
</div>
</div>
)
}
|
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
import { CommonModule } from "@angular/common";
import { NgModule } from "@angular/core";
import { ScrollObservableService } from "./service";
import { VirtualRowComponent } from "./virtualRow.component";
import { VirtualScrollComponent } from "./virtualScroll.component";
var VirtualScrollModule = (function () {
function VirtualScrollModule() {
}
VirtualScrollModule.decorators = [
{ type: NgModule, args: [{
declarations: [VirtualRowComponent, VirtualScrollComponent],
entryComponents: [VirtualRowComponent],
exports: [VirtualScrollComponent],
imports: [CommonModule],
providers: [ScrollObservableService],
},] },
];
/** @nocollapse */
VirtualScrollModule.ctorParameters = function () { return []; };
return VirtualScrollModule;
}());
export { VirtualScrollModule };
function VirtualScrollModule_tsickle_Closure_declarations() {
/** @type {!Array<{type: !Function, args: (undefined|!Array<?>)}>} */
VirtualScrollModule.decorators;
/**
* @nocollapse
* @type {function(): !Array<(null|{type: ?, decorators: (undefined|!Array<{type: !Function, args: (undefined|!Array<?>)}>)})>}
*/
VirtualScrollModule.ctorParameters;
}
//# sourceMappingURL=virtualScroll.module.js.map |
"""Provides OSS compatibile macros."""
load("//tools/build_defs:glob_defs.bzl", "subdir_glob")
load("//tools/build_defs/android:fb_xplat_cxx_library.bzl", "fb_xplat_cxx_library")
def profilo_path(dep):
return "//" + dep
def profilo_oss_android_library(**kwargs):
"""Delegates to the native android_library rule."""
native.android_library(**kwargs)
def profilo_oss_cxx_library(**kwargs):
"""Delegates to the native cxx_library rule."""
native.cxx_library(**kwargs)
def profilo_oss_java_library(**kwargs):
"""Delegates to the native java_library rule."""
native.java_library(**kwargs)
def profilo_oss_only_java_library(**kwargs):
profilo_oss_java_library(**kwargs)
def profilo_oss_maven_library(
name,
group,
artifact,
version,
sha1,
visibility,
packaging = "jar",
scope = "compiled"):
"""
Creates remote_file and prebuilt_jar rules for a maven artifact.
"""
_ignore = scope
remote_file_name = "{}-remote".format(name)
remote_file(
name = remote_file_name,
out = "{}-{}.{}".format(name, version, packaging),
sha1 = sha1,
url = ":".join(["mvn", group, artifact, packaging, version]),
)
if packaging == "jar":
native.prebuilt_jar(
name = name,
binary_jar = ":{}".format(remote_file_name),
visibility = visibility,
)
else:
native.android_prebuilt_aar(
name = name,
aar = ":{}".format(remote_file_name),
visibility = visibility,
)
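# Illustrative usage from a BUILD file (all coordinates and the sha1 below are
# placeholders, not real values):
#
# profilo_oss_maven_library(
#     name = "example-lib",
#     group = "com.example",
#     artifact = "example-lib",
#     version = "1.0.0",
#     sha1 = "0000000000000000000000000000000000000000",
#     visibility = ["PUBLIC"],
# )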
def profilo_oss_xplat_cxx_library(**kwargs):
fb_xplat_cxx_library(**kwargs)
def profilo_maybe_hidden_visibility():
return ["-fvisibility=hidden"]
def setup_profilo_oss_xplat_cxx_library():
profilo_oss_xplat_cxx_library(
name = "fbjni",
srcs = glob([
"cxx/fbjni/**/*.cpp",
]),
header_namespace = "",
exported_headers = subdir_glob([
("cxx", "fbjni/**/*.h"),
]),
compiler_flags = [
"-fexceptions",
"-fno-omit-frame-pointer",
"-frtti",
"-ffunction-sections",
],
exported_platform_headers = [
(
"^(?!android-arm$).*$",
subdir_glob([
("cxx", "lyra/**/*.h"),
]),
),
],
exported_platform_linker_flags = [
(
"^android",
["-llog"],
),
        # There is a bug in the NDK that makes linking fail for the Android log
        # library. This is a workaround for older NDKs; newer NDKs use a linker
        # without the bug.
# See https://github.com/android-ndk/ndk/issues/556
(
"^android-arm64",
["-fuse-ld=gold"],
),
(
"^android-x86",
["-latomic"],
),
],
platform_srcs = [
(
"^(?!android-arm$).*$",
glob([
"cxx/lyra/*.cpp",
]),
),
],
soname = "libfbjni.$(ext)",
visibility = [
"PUBLIC",
],
)
def setup_profilo_oss_cxx_library():
profilo_oss_cxx_library(
name = "procmaps",
srcs = [
"procmaps.c",
],
header_namespace = "",
exported_headers = subdir_glob([
("", "*.h"),
]),
compiler_flags = [
"-std=gnu99",
],
force_static = True,
visibility = [
"PUBLIC",
],
)
|
#include "check_debug.h"
int copy_from_user(void *dest, void *src, int size){}
struct my_struct {
int x, y;
};
void *pointer;
struct my_struct *dest;
struct my_struct *returns_copy(void)
{
copy_from_user(dest, pointer, sizeof(*dest));
return dest;
}
struct my_struct *a;
void test(void)
{
a = returns_copy();
__smatch_user_rl(a->x);
}
/*
* check-name: smatch user data #2
* check-command: smatch -p=kernel -I.. sm_user_data2.c
*
* check-output-start
sm_user_data2.c:22 test() user rl: 'a->x' = 's32min-s32max'
* check-output-end
*/
|
#if 0
//
// Generated by Microsoft (R) HLSL Shader Compiler 9.29.952.3111
//
//
// fxc /nologo /E PS_FtoU_PM_RGBA_2D /T ps_4_0 /Fh
// compiled\multiplyalpha_ftou_pm_rgba_2d_ps.h MultiplyAlpha.hlsl
//
//
// Resource Bindings:
//
// Name Type Format Dim Slot Elements
// ------------------------------ ---------- ------- ----------- ---- --------
// Sampler sampler NA NA 0 1
// TextureF texture float4 2d 0 1
//
//
//
// Input signature:
//
// Name Index Mask Register SysValue Format Used
// -------------------- ----- ------ -------- -------- ------ ------
// SV_POSITION 0 xyzw 0 POS float
// TEXCOORD 0 xy 1 NONE float xy
//
//
// Output signature:
//
// Name Index Mask Register SysValue Format Used
// -------------------- ----- ------ -------- -------- ------ ------
// SV_TARGET 0 xyzw 0 TARGET uint xyzw
//
ps_4_0
dcl_sampler s0, mode_default
dcl_resource_texture2d (float,float,float,float) t0
dcl_input_ps linear v1.xy
dcl_output o0.xyzw
dcl_temps 1
sample r0.xyzw, v1.xyxx, t0.xyzw, s0
mul r0.xyz, r0.wwww, r0.xyzx
mul r0.xyzw, r0.xyzw, l(255.000000, 255.000000, 255.000000, 255.000000)
ftou o0.xyzw, r0.xyzw
ret
// Approximately 5 instruction slots used
#endif
const BYTE g_PS_FtoU_PM_RGBA_2D[] = {
68, 88, 66, 67, 93, 192, 148, 235, 21, 122, 15, 156, 33, 238, 227, 9, 210, 97, 80,
205, 1, 0, 0, 0, 176, 2, 0, 0, 5, 0, 0, 0, 52, 0, 0, 0, 220, 0,
0, 0, 52, 1, 0, 0, 104, 1, 0, 0, 52, 2, 0, 0, 82, 68, 69, 70, 160,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 28, 0, 0, 0,
0, 4, 255, 255, 0, 1, 0, 0, 109, 0, 0, 0, 92, 0, 0, 0, 3, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 1, 0, 0, 0, 100, 0, 0, 0, 2, 0, 0, 0, 5, 0, 0, 0, 4,
0, 0, 0, 255, 255, 255, 255, 0, 0, 0, 0, 1, 0, 0, 0, 13, 0, 0, 0,
83, 97, 109, 112, 108, 101, 114, 0, 84, 101, 120, 116, 117, 114, 101, 70, 0, 77, 105,
99, 114, 111, 115, 111, 102, 116, 32, 40, 82, 41, 32, 72, 76, 83, 76, 32, 83, 104,
97, 100, 101, 114, 32, 67, 111, 109, 112, 105, 108, 101, 114, 32, 57, 46, 50, 57, 46,
57, 53, 50, 46, 51, 49, 49, 49, 0, 171, 171, 73, 83, 71, 78, 80, 0, 0, 0,
2, 0, 0, 0, 8, 0, 0, 0, 56, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0,
0, 3, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 68, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 1, 0, 0, 0, 3, 3, 0, 0, 83,
86, 95, 80, 79, 83, 73, 84, 73, 79, 78, 0, 84, 69, 88, 67, 79, 79, 82, 68,
0, 171, 171, 171, 79, 83, 71, 78, 44, 0, 0, 0, 1, 0, 0, 0, 8, 0, 0,
0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 15, 0, 0, 0, 83, 86, 95, 84, 65, 82, 71, 69, 84, 0, 171, 171, 83,
72, 68, 82, 196, 0, 0, 0, 64, 0, 0, 0, 49, 0, 0, 0, 90, 0, 0, 3,
0, 96, 16, 0, 0, 0, 0, 0, 88, 24, 0, 4, 0, 112, 16, 0, 0, 0, 0,
0, 85, 85, 0, 0, 98, 16, 0, 3, 50, 16, 16, 0, 1, 0, 0, 0, 101, 0,
0, 3, 242, 32, 16, 0, 0, 0, 0, 0, 104, 0, 0, 2, 1, 0, 0, 0, 69,
0, 0, 9, 242, 0, 16, 0, 0, 0, 0, 0, 70, 16, 16, 0, 1, 0, 0, 0,
70, 126, 16, 0, 0, 0, 0, 0, 0, 96, 16, 0, 0, 0, 0, 0, 56, 0, 0,
7, 114, 0, 16, 0, 0, 0, 0, 0, 246, 15, 16, 0, 0, 0, 0, 0, 70, 2,
16, 0, 0, 0, 0, 0, 56, 0, 0, 10, 242, 0, 16, 0, 0, 0, 0, 0, 70,
14, 16, 0, 0, 0, 0, 0, 2, 64, 0, 0, 0, 0, 127, 67, 0, 0, 127, 67,
0, 0, 127, 67, 0, 0, 127, 67, 28, 0, 0, 5, 242, 32, 16, 0, 0, 0, 0,
0, 70, 14, 16, 0, 0, 0, 0, 0, 62, 0, 0, 1, 83, 84, 65, 84, 116, 0,
0, 0, 5, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0};
|
import random
def runRock(x):
""" Funkcja obsługująca grę w "Kamień, papier, nożyce".
Args:
x (int): Wybór gracza.
Returns:
str: Rezultat gry.
"""
var = int(x)
en = random.randint(1,3)
if var == 1:
mr = "Kamień"
elif var == 2:
mr = "Papier"
else:
mr = "Nożyce"
if en == 1:
er = "Kamień"
elif en == 2:
er = "Papier"
else:
er = "Nożyce"
if en==var:
out = "REMIS"
elif (en==1 and var == 2) or (en==2 and var==3) or (en==3 and var==1):
out = "WYGRANA"
else:
out = "PRZEGRANA"
return f"<h1>WYNIK:</h1><h3>Wybrales: {mr}, przeciwnik wybral: {er}</h3><br /><h1>{out}</h1>"
|
from django.contrib.gis.geos import GeometryCollection, GEOSGeometry
from django.db import transaction
from rest_framework import serializers
from shapely.geometry import shape
from shapely.wkb import dumps
from .. import models
class RegionSerializer(serializers.BaseSerializer):
def to_internal_value(self, data):
return data
def to_representation(self, instance: models.Region) -> dict:
data = {
'type': 'FeatureCollection',
'features': [],
}
features = models.Feature.objects.filter(parent_region=instance).all()
for feature in features:
subdata = {
'geometry': feature.footprint.json,
'properties': feature.properties,
'type': 'Feature',
}
data['features'].append(subdata)
return data
@transaction.atomic
def create(self, data):
assert data['type'] == 'FeatureCollection'
geometries = []
for json_feature in data.get('features', []):
geometries.append(GEOSGeometry(memoryview(dumps(shape(json_feature['geometry'])))))
instance = models.Region()
instance.footprint = GeometryCollection(*geometries)
instance.outline = instance.footprint.convex_hull
instance.skip_signal = True
instance.save()
for i, json_feature in enumerate(data.get('features', [])):
properties = json_feature['properties']
if properties['type'] == 'region':
instance.version = properties['version']
instance.save(
update_fields=[
'version',
]
)
feature = models.Feature()
feature.parent_region = instance
feature.properties = properties
feature.footprint = geometries[i]
feature.outline = feature.footprint.convex_hull
feature.start_date = properties['start_date']
feature.end_date = properties['end_date']
feature.save()
return instance
|
#!/usr/bin/env python3
import numpy as np
def load_data(latency_fn, features_fn, drop_fn):
latency_file = open(latency_fn)
features_file = open(features_fn)
drop_file = open(drop_fn)
x = []
y_l = []
y_d = []
y_e = []
for latency_line in latency_file.readlines(): #timestamp, latency, network_state
features_line = features_file.readline()
drop_line = drop_file.readline() #drop
# TCP:
# approx. server, approx. aggregation switch, approx. aggregation interface,
# approx. ToR, change in timestamp since last message,
# ewma of packet interarrival time
# Homa:
# approx. server, approx. aggregation switch, approx. aggregation interface,
# approx. ToR, priority, length, change in timestamp since last message,
# ewma of packet interarrival time
split_line = features_line.strip().split()
timestamp, latency, network_state = latency_line.strip().split()
drop_toks = drop_line.strip().split()
drop = drop_toks[0]
if len(drop_toks) > 1:
ecn = drop_toks[1]
x_new = [float(network_state)]
for val in split_line:
x_new.append(float(val))
#x_new = netstate, emulated_server, agg, agg_intf, emulated_tor,
# time_diff, ewma, [start_ecn]
x.append(x_new)
y_d.append(int(drop))
y_l.append(float(latency))
if len(drop_toks) > 1:
y_e.append(int(ecn))
x = np.array(x)
y_d = np.array(y_d)
y_l = np.array(y_l)
y_e = np.array(y_e)
return (x, y_d, y_l, y_e)
if __name__=="__main__":
import sys
import argparse
import pickle
parser = argparse.ArgumentParser()
parser.add_argument("features", type=str, help="Feature file path")
parser.add_argument("latencies", type=str, help="Latency data file path")
parser.add_argument("drops", type=str, help="Drop data file path")
parser.add_argument("output_file", type=str, help="Output file path")
args = parser.parse_args()
x, y_d, y_l, y_e = load_data(args.latencies, args.features, args.drops)
if y_e.size != 0:
data = {'X':x, 'y_d':y_d, 'y_l': y_l, 'y_e': y_e}
else:
data = {'X':x, 'y_d':y_d, 'y_l': y_l}
pickle.dump(data, open(args.output_file,'wb'))
|
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif
FOUNDATION_EXPORT double Pods_nanoUI_ExampleVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_nanoUI_ExampleVersionString[];
|
locais = ["China", "Japão", "India", "Russia"]
print(locais)
print(sorted(locais))
print(locais)
print(sorted(locais, reverse=True))
locais.reverse()
print(locais)
locais.reverse()
print(locais)
locais.sort()
print(locais)
locais.sort(reverse=True)
print(locais)
|
# -*- coding:utf-8 -*-
# PROJECT_NAME : django-laravel-validator
# FILE_NAME :
# AUTHOR : younger shen
class BaseValidatorException(Exception):
pass
class InvalidValidateDataError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid validate data error'
def __str__(self):
return self.message
class InvalidMinValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid MIN validator parameter error'
def __str__(self):
return self.message
class InvalidMaxValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid MAX validator parameter error'
def __str__(self):
return self.message
class InvalidRangeValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid RANGE validator parameter error'
def __str__(self):
return self.message
class InvalidLengthValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
        self.message = message if message else 'invalid LENGTH validator parameter error'
def __str__(self):
return self.message
class InvalidAcceptedValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid ACCEPTED validator parameter error'
def __str__(self):
return self.message
class InvalidActiveURLValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid active_url validator parameter error'
def __str__(self):
return self.message
class InvalidAlphaValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid alpha validator parameter error'
def __str__(self):
return self.message
class InvalidRegexValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid regex validator parameter error'
def __str__(self):
return self.message
class InvalidDataError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid data error'
def __str__(self):
return self.message
class InvalidRuleNameError(BaseValidatorException):
def __init__(self, message=None, rule=None):
self.message = message if message else 'invalid rule name error'
if rule:
self.message = self.message + ': ' + rule
def __str__(self):
return self.message
class InvalidMatchValidatorParameterError(BaseValidatorException):
def __init__(self, message=None):
self.message = message if message else 'invalid match validator parameter error'
def __str__(self):
return self.message
class InvalidValidateDataError(BaseException):
def __init__(self, message=None):
self.message = message if message else 'invalid validate data error'
def __str__(self):
return self.message
class InvalidUniqueValidatorParameterError(Exception):
def __init__(self, message=None):
self.message = message if message else 'invalid unique validator parameter error'
def __str__(self):
return self.message
|
import { useRef, useState } from 'react';
import { Link as RouterLink } from 'react-router-dom';
// material
import { Menu, MenuItem, IconButton, ListItemIcon, ListItemText, Backdrop, CircularProgress } from '@mui/material';
// component
import Iconify from '../../../components/Iconify';
import axios from '../../../axios/axiosinstance'
// ----------------------------------------------------------------------
export default function UserMoreMenu({ id, updated }) {
const ref = useRef(null);
const [isOpen, setIsOpen] = useState(false);
const [backdrop, setBackdrop] = useState(false)
const deleteHandler = (id) => {
        if (!window.confirm("Are you sure you want to delete?")) {
            return
        }
const body = { id: [id] }
setBackdrop(true)
axios.delete("/admin/vendor/delete", {
params: {
...body
}
}).then((response) => {
if (response.data) {
setIsOpen(false)
updated(id)
setBackdrop(false)
}
})
}
    const editHandler = () => {
console.log(id)
}
return (
<>
<Backdrop
sx={{ color: '#fff', zIndex: (theme) => theme.zIndex.drawer + 1 }}
open={backdrop}
>
<CircularProgress color="inherit" />
</Backdrop>
<IconButton ref={ref} onClick={() => setIsOpen(true)}>
<Iconify icon="eva:more-vertical-fill" width={20} height={20} />
</IconButton>
<Menu
open={isOpen}
anchorEl={ref.current}
onClose={() => setIsOpen(false)}
PaperProps={{
sx: { width: 200, maxWidth: '100%' }
}}
anchorOrigin={{ vertical: 'top', horizontal: 'right' }}
transformOrigin={{ vertical: 'top', horizontal: 'right' }}
>
<MenuItem onClick={() => { deleteHandler(id) }} sx={{ color: 'text.secondary' }}>
<ListItemIcon >
<Iconify icon="eva:trash-2-outline" width={24} height={24} />
</ListItemIcon>
<ListItemText primary="Delete" primaryTypographyProps={{ variant: 'body2' }} />
</MenuItem>
                <MenuItem onClick={() => { editHandler(id) }} component={RouterLink} to="#" sx={{ color: 'text.secondary' }}>
<ListItemIcon >
<Iconify icon="eva:edit-fill" width={24} height={24} />
</ListItemIcon>
<ListItemText primary="Edit" primaryTypographyProps={{ variant: 'body2' }} />
</MenuItem>
</Menu>
</>
);
}
|
from django.db import migrations
def forwards_func(apps, schema_editor):
SensorType = apps.get_model("api", "SensorType")
db_alias = schema_editor.connection.alias
humidity = SensorType.objects.using(db_alias).get(sensor_type_id="humidity")
humidity.sensor_type_unit = "%"
humidity.save()
def reverse_func(apps, schema_editor):
SensorType = apps.get_model("api", "SensorType")
db_alias = schema_editor.connection.alias
humidity = SensorType.objects.using(db_alias).get(sensor_type_id="humidity")
humidity.sensor_type_unit = "ml"
humidity.save()
class Migration(migrations.Migration):
dependencies = [
('api', '0015_auto_20180124_1125')
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
#===============================================================================
# @file protocol.py
#
# @brief Handle message protocol parsing.
#
# @author [email protected]
#
# Copyright (c) 2014 Parrot S.A.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Parrot Company nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE PARROT COMPANY BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#===============================================================================
import logging
import struct
from pomp.buffer import Buffer
_log = logging.getLogger("pomp")
# Magic bytes
POMP_MAGIC_0 = ord('P') # Magic byte 0
POMP_MAGIC_1 = ord('O') # Magic byte 1
POMP_MAGIC_2 = ord('M') # Magic byte 2
POMP_MAGIC_3 = ord('P') # Magic byte 3
# Magic value
POMP_MAGIC = (
(POMP_MAGIC_3 << 24) |
(POMP_MAGIC_2 << 16) |
(POMP_MAGIC_1 << 8) |
POMP_MAGIC_0)
# Data types
DATA_TYPE_I8 = 0x01 # 8-bit signed integer
DATA_TYPE_U8 = 0x02 # 8-bit unsigned integer
DATA_TYPE_I16 = 0x03 # 16-bit signed integer
DATA_TYPE_U16 = 0x04 # 16-bit unsigned integer
DATA_TYPE_I32 = 0x05 # 32-bit signed integer
DATA_TYPE_U32 = 0x06 # 32-bit unsigned integer
DATA_TYPE_I64 = 0x07 # 64-bit signed integer
DATA_TYPE_U64 = 0x08 # 64-bit unsigned integer
DATA_TYPE_STR = 0x09 # String
DATA_TYPE_BUF = 0x0a # Buffer
DATA_TYPE_F32 = 0x0b # 32-bit floating point
DATA_TYPE_F64 = 0x0c # 64-bit floating point
# Size of protocol header
HEADER_SIZE = 12
#===============================================================================
#===============================================================================
class Header(object):
def __init__(self, magic, msgid, size):
self.magic = magic
self.msgid = msgid
self.size = size
#===============================================================================
#===============================================================================
class Protocol(object):
_STATE_IDLE = 0
_STATE_HEADER_MAGIC_0 = 1
_STATE_HEADER_MAGIC_1 = 2
_STATE_HEADER_MAGIC_2 = 3
_STATE_HEADER_MAGIC_3 = 4
_STATE_HEADER = 5
_STATE_PAYLOAD = 6
def __init__(self):
self.state = Protocol._STATE_IDLE
self.headerBuf = None
self.header = None
self.msg = None
self.bufSrc = None
self.offSrc = 0
self.lenSrc = 0
self._reset()
def decode(self, buf, off):
rxMsg = None
# If idle start a new parsing
if self.state == Protocol._STATE_IDLE:
self.state = Protocol._STATE_HEADER_MAGIC_0
# Setup source buffer
self.bufSrc = buf
self.offSrc = off
self.lenSrc = len(buf) - off
while self.lenSrc > 0 and self.state != Protocol._STATE_IDLE:
if self.state == Protocol._STATE_HEADER_MAGIC_0:
self._reset()
self.state = Protocol._STATE_HEADER_MAGIC_0
self._copyOne(self.headerBuf)
self._checkMagic(0, POMP_MAGIC_0, Protocol._STATE_HEADER_MAGIC_1)
elif self.state == Protocol._STATE_HEADER_MAGIC_1:
self._copyOne(self.headerBuf)
self._checkMagic(1, POMP_MAGIC_1, Protocol._STATE_HEADER_MAGIC_2)
elif self.state == Protocol._STATE_HEADER_MAGIC_2:
self._copyOne(self.headerBuf)
self._checkMagic(2, POMP_MAGIC_2, Protocol._STATE_HEADER_MAGIC_3)
elif self.state == Protocol._STATE_HEADER_MAGIC_3:
self._copyOne(self.headerBuf)
self._checkMagic(3, POMP_MAGIC_3, Protocol._STATE_HEADER)
elif self.state == Protocol._STATE_HEADER:
self._copy(self.headerBuf, HEADER_SIZE)
if len(self.headerBuf) == HEADER_SIZE:
self._decodeHeader()
elif self.state == Protocol._STATE_PAYLOAD:
self._copy(self.msg.buf, self.header.size)
else:
assert False
# Check end of payload
if (self.state == Protocol._STATE_PAYLOAD
and len(self.msg.buf) == self.header.size):
# Give ownership of message to caller
self.msg.setFinished()
rxMsg = self.msg
self.msg = None
self.state = Protocol._STATE_IDLE
return (self.offSrc, rxMsg)
def _reset(self):
self.state = Protocol._STATE_IDLE
self.headerBuf = Buffer()
self.header = None
self.msg = None
def _checkMagic(self, idx, val, state):
if isinstance(self.headerBuf.getData(), str):
magic = ord(self.headerBuf.getData()[idx])
else:
magic = self.headerBuf.getData()[idx]
if magic != val:
_log.warning("Bad header magic %d: 0x%02x(0x%02x)", idx, magic, val)
self.state = Protocol._STATE_HEADER_MAGIC_0
else:
self.state = state
def _copyOne(self, bufDst):
bufDst.write(self.bufSrc[self.offSrc:self.offSrc+1])
self.offSrc += 1
self.lenSrc -= 1
def _copy(self, bufDst, sizeDst):
cpyLen = min(self.lenSrc, sizeDst - len(bufDst))
bufDst.write(self.bufSrc[self.offSrc:self.offSrc+cpyLen])
self.offSrc += cpyLen
self.lenSrc -= cpyLen
def _decodeHeader(self):
try:
# Decode header fields
self.headerBuf.rewind()
magic = self.headerBuf.readInt()
msgid = self.headerBuf.readInt()
size = self.headerBuf.readInt()
self.header = Header(magic, msgid, size)
            # Check header and setup payload decoding
if self.header.size < HEADER_SIZE:
_log.warning("Bad header size: %d", self.header.size)
self.state = Protocol._STATE_HEADER_MAGIC_0
else:
self._allocMsg(self.header.msgid, self.header.size)
self.msg.buf.write(self.headerBuf.getData())
self.state = Protocol._STATE_PAYLOAD
except struct.error as ex:
_log.error(ex)
self.state = Protocol._STATE_HEADER_MAGIC_0
def _allocMsg(self, msgid, size):
from pomp.message import Message
self.msg = Message()
self.msg.init(msgid)
|
import logging
import tensorflow as tf
from absl import logging as absl_logging
def get_logger():
"""Retrieves tensorflow logger and changes log formatting."""
formatting = "%(asctime)s: %(levelname)s %(filename)s:%(lineno)s] %(message)s"
formatter = logging.Formatter(formatting)
absl_logging.get_absl_handler().setFormatter(formatter)
for h in tf.get_logger().handlers:
h.setFormatter(formatter)
logger = tf.get_logger()
logger.setLevel(logging.INFO)
return logger
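# Minimal usage sketch (illustrative): configure the shared TF/absl handlers and emit a
# message through them.
if __name__ == "__main__":
    logger = get_logger()
    logger.info("logger configured")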
|
const square = require("./square");
test ("square of 1 equals 1 * 1 =1", () => {
expect ( square(1)).toBe(1);
})
test ("square of 2 equals 2 *2 = 4", () => {
expect (square(2)).toBe(4);
}) |
import React from 'react';
import { Jumbotron, Container, CardColumns, Card, Button } from 'react-bootstrap';
import { useQuery, useMutation } from '@apollo/client';
import { GET_ME } from '../utils/queries';
import { REMOVE_BOOK } from '../utils/mutations';
import { removeBookId } from '../utils/localStorage';
import Auth from '../utils/auth';
const SavedBooks = () => {
const { loading, data } = useQuery(GET_ME);
const [removeBook, { error }] = useMutation(REMOVE_BOOK);
const userData = data?.me || {};
// create function that accepts the book's _id value as param and deletes the book from the database
const handleDeleteBook = async (bookId) => {
// get token
const token = Auth.loggedIn() ? Auth.getToken() : null;
if (!token) {
return false;
}
try {
await removeBook({
variables: { bookId },
});
// upon success, remove book's id from localStorage
removeBookId(bookId);
} catch (err) {
console.error(err);
}
};
// if data isn't here yet, say so
if (loading) {
return <h2>LOADING...</h2>;
}
return (
<>
<Jumbotron fluid className='text-light bg-dark'>
<Container>
<h1>Viewing {userData.username}'s books!</h1>
</Container>
</Jumbotron>
<Container>
<h2>
{userData.savedBooks?.length
? `Viewing ${userData.savedBooks.length} saved ${
userData.savedBooks.length === 1 ? 'book' : 'books'
}:`
: 'You have no saved books!'}
</h2>
<CardColumns>
{userData.savedBooks?.map((book) => {
return (
<Card key={book.bookId} border='dark'>
{book.image ? (
<Card.Img src={book.image} alt={`The cover for ${book.title}`} variant='top' />
) : null}
<Card.Body>
<Card.Title>{book.title}</Card.Title>
<p className='small'>Authors: {book.authors}</p>
<Card.Text>{book.description}</Card.Text>
<Button
className='btn-block btn-danger'
onClick={() => handleDeleteBook(book.bookId)}>
Delete this Book!
</Button>
{error && <span className="ml-2">Something went wrong...</span>}
</Card.Body>
</Card>
);
})}
</CardColumns>
</Container>
</>
);
};
export default SavedBooks;
|
"""Module containing the internal data representation for an install configuration and install module.
"""
|
"""test utils"""
from django.shortcuts import reverse
from django.test import TestCase
from django.test.client import RequestFactory
from pyazo.root.celery import after_task_publish, config_loggers
from pyazo.utils import get_client_ip, get_reverse_dns
class UtilsTest(TestCase):
"""Test utils"""
def setUp(self):
self.factory = RequestFactory()
def test_remote_ip(self):
"""test get_client_ip"""
with self.assertRaises(ValueError):
get_client_ip(None)
request = self.factory.get(reverse("index"))
request.META["REMOTE_ADDR"] = "aa"
self.assertEqual(get_client_ip(request), "aa")
def test_reverse_dns(self):
"""Test get_reverse_dns"""
self.assertEqual(get_reverse_dns("erqwer"), "")
def test_celery(self):
"""Test celery setup"""
config_loggers()
after_task_publish(headers={}, body={})
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Views for CCDC file documentation pages.
"""
# Django tricks
from django.apps import apps
from django.http import Http404
from django.urls import reverse
from calaccess_website.templatetags.calaccess_website_tags import slugify
# Models
from calaccess_processed.models import ProcessedDataFile
# Views
from calaccess_website.views import CalAccessModelListMixin
from django.views.generic import DetailView, ListView
def get_ocd_proxy_models():
"""
Return an iterable of all OCD proxy models from the processed_data app.
"""
election_proxies = apps.get_app_config('calaccess_processed_elections').get_ocd_models_map().values()
flat_proxies = apps.get_app_config("calaccess_processed_flatfiles").get_flat_proxy_list()
return list(election_proxies) + list(flat_proxies)
def get_processed_data_files():
"""
Return a tuple of ProcessedDataFile instances for published files.
"""
file_list = [ProcessedDataFile(file_name=m().file_name) for m in get_ocd_proxy_models()]
return sorted(file_list, key=lambda f: f.file_name)
class CcdcFileList(ListView, CalAccessModelListMixin):
template_name = 'calaccess_website/docs/ccdc/file_list.html'
def get_queryset(self):
"""
        Returns the CCDC model list grouped by type.
"""
return self.regroup_by_klass_group(get_processed_data_files())
def get_context_data(self, **kwargs):
context = super(CcdcFileList, self).get_context_data(**kwargs)
context['file_num'] = len(get_processed_data_files())
context['title'] = 'Processed files'
context['description'] = 'Definitions, record layouts and data dictionaries for the \
processed data files released by the California Civic Data Coalition. Recommended for beginners and regular use.'
return context
class BaseFileDetailView(DetailView):
"""
Base class for views providing information about a CCDC data file.
"""
def get_queryset(self):
"""
        Returns a dictionary of the CCDC data files keyed by URL slug.
"""
return dict((slugify(f.file_name), f) for f in get_processed_data_files())
def set_kwargs(self, obj):
self.kwargs = {
'slug': obj
}
def get_object(self):
"""
Returns the file model from the CAL-ACCESS processed data app that
matches the provided slug.
Raises a 404 error if one is not found
"""
key = self.kwargs['slug']
try:
return self.get_queryset()[key.lower()]
except KeyError:
raise Http404
def get_context_data(self, **kwargs):
"""
Add some extra bits to the template's context
"""
file_name = self.kwargs['slug'].replace("-", "")
context = super(BaseFileDetailView, self).get_context_data(**kwargs)
# Pull all previous versions of the provided file
context['version_list'] = ProcessedDataFile.objects.filter(
file_name__icontains=file_name
).order_by(
'-version__raw_version__release_datetime'
).exclude(
version__raw_version__release_datetime__lte='2016-07-27'
)
# note if the most recent version of the file is empty
try:
context['empty'] = context['version_list'][0].records_count == 0
except IndexError:
context['empty'] = True
return context
class CcdcFileDownloadsList(BaseFileDetailView):
"""
A detail page with links to all downloads for the provided CCDC data file.
"""
template_name = 'calaccess_website/docs/ccdc/download_list.html'
def get_url(self, obj):
return reverse('ccdc_file_downloads_list', kwargs=dict(slug=obj))
class CcdcFileDetail(BaseFileDetailView):
"""
A detail page with all documentation for the provided CCDC data file.
"""
template_name = 'calaccess_website/docs/ccdc/file_detail.html'
def get_url(self, obj):
return reverse('ccdc_file_detail', kwargs=dict(slug=obj))
def get_context_data(self, **kwargs):
"""
Add some extra bits to the template's context
"""
context = super(CcdcFileDetail, self).get_context_data(**kwargs)
# Add list of fields to context
context['fields'] = self.get_sorted_fields()
return context
def get_sorted_fields(self):
"""
Return a list of fields (dicts) sorted by name.
"""
field_list = []
for field in self.object.model().get_field_list():
field_data = {
'column': field.name,
'description': field.description,
'help_text': field.help_text,
}
if field.choices and len(field.choices) > 0:
field_data['choices'] = [c for c in field.choices]
else:
field_data['choices'] = None
field_list.append(field_data)
return sorted(field_list, key=lambda k: k['column'])
|
var searchData=
[
['emg',['emg',['../structst__meas.html#a3d1b5e40ddbb5e34307f6b8e9cc116ff',1,'st_meas']]],
['emg_5fcalibration_5fflag',['emg_calibration_flag',['../structst__mem.html#a78f0b0c6db2a7118cd15ec0aa38ccdb9',1,'st_mem']]],
['emg_5fmax_5fvalue',['emg_max_value',['../structst__mem.html#aeb71cf2bff2584abb616d4b5dcc4c4af',1,'st_mem']]],
['emg_5fspeed',['emg_speed',['../structst__mem.html#ae8e800591064bf14eb2dd3a3fb2c325b',1,'st_mem']]],
['emg_5fthreshold',['emg_threshold',['../structst__mem.html#aaeb84b2fd1a137ee9234fd3c24c97aaa',1,'st_mem']]],
['enabled',['enabled',['../structst__calib.html#a2333939d3e81212884efcaa5bb1c1ec1',1,'st_calib']]]
];
|
# Python 3.8.3
from math import ceil
def get_input():
with open("input.txt", "r") as f:
lines = f.read().split()
earliest = int(lines[0])
ids = lines[1].split(",")
return earliest, [int(i) for i in ids if i.isnumeric()]
def main():
earliest_time, bus_ids = get_input()
departure_times = {}
for bus_id in bus_ids:
wait = ceil(earliest_time / bus_id) * bus_id - earliest_time
departure_times[wait] = bus_id
shortest_wait = min(departure_times.keys())
return shortest_wait * departure_times[shortest_wait]
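# Worked example of the wait computation above (illustrative): with earliest_time = 939
# and bus_id = 59, ceil(939 / 59) * 59 = 944, so the wait is 944 - 939 = 5 minutes.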
if __name__ == "__main__":
import time
start = time.perf_counter()
print(main())
print(time.perf_counter() - start)
|
const express = require('express');
const cluster = require('cluster');
const prom = require('prom-client');
const config = require('./config');
const logger = require('./logger');
const server = express();
const Registry = prom.Registry;
const AggregatorRegistry = prom.AggregatorRegistry;
const aggregatorRegistry = new AggregatorRegistry();
const cmRegister = new Registry();
if (cluster.isMaster) {
server.get('/metrics', (req, res) => {
aggregatorRegistry.clusterMetrics((err, metrics) => {
if (err) logger.error(err);
res.set('Content-Type', aggregatorRegistry.contentType);
res.send(metrics);
});
});
server.get('/cm_metrics', (req, res) => {
res.set('Content-Type', cmRegister.contentType);
res.end(cmRegister.metrics());
});
cmRegister.setDefaultLabels({ server: config.stateName.toLowerCase() });
prom.collectDefaultMetrics({ register: cmRegister, prefix: 'dyno_cm_' });
server.listen(3001);
} else {
const defaultLabels = { clusterId: process.env.clusterId, server: config.stateName.toLowerCase() };
prom.register.setDefaultLabels(defaultLabels);
prom.collectDefaultMetrics({ prefix: 'dyno_app_' });
const messagesCounter = new prom.Counter({
name: 'dyno_app_messages_sent',
help: 'Counts messages sent (type = dm|normal|webhook)',
labelNames: ['type'],
});
const helpSentCounter = new prom.Counter({
name: 'dyno_app_help_sent',
help: 'Counts helps sent',
});
const helpFailedCounter = new prom.Counter({
name: 'dyno_app_help_failed',
help: 'Counts helps failed',
});
const guildsCarbon = new prom.Gauge({
name: 'dyno_app_guilds_carbon',
help: 'Guild count for Dyno',
});
const guildEvents = new prom.Counter({
name: 'dyno_app_guild_events',
help: 'Guild events counter (type = create, delete, etc)',
labelNames: ['type'],
});
const guildCounts = new prom.Gauge({
name: 'dyno_app_guild_count',
help: 'Guild count based on cluster id',
});
const userCounts = new prom.Gauge({
name: 'dyno_app_user_count',
help: 'User count based on cluster id',
});
const gatewayEvents = new prom.Gauge({
name: 'dyno_app_gateway_events',
help: 'GW Event counter (type = event type)',
labelNames: ['type'],
});
const messageEvents = new prom.Counter({
name: 'dyno_app_message_events',
help: 'Message events counter (type = create, delete, etc)',
labelNames: ['type'],
});
const discordShard = new prom.Counter({
name: 'dyno_app_discord_shard',
help: 'Discord shard status (type = connect, disconnect, resume, etc)',
labelNames: ['type'],
});
const commandSuccess = new prom.Counter({
name: 'dyno_app_command_success',
help: 'Command success counter (group = cmd group, name = cmd name)',
labelNames: ['group', 'name'],
});
const commandError = new prom.Counter({
name: 'dyno_app_command_error',
help: 'Command error counter (group = cmd group, name = cmd name)',
labelNames: ['group', 'name'],
});
const commandTimings = new prom.Histogram({
name: 'dyno_app_command_time',
help: 'Command timing histogram (group = cmd group, name = cmd name)',
labelNames: ['group', 'name'],
buckets: [100, 200, 300, 500, 800, 1000, 5000],
});
const purgeSuccessCounter = new prom.Counter({
name: 'dyno_app_purge_success',
help: 'Counts successful purges',
});
const purgeFailedCounter = new prom.Counter({
name: 'dyno_app_purge_failed',
help: 'Counts failed purges',
});
const eventLoopBlockCounter = new prom.Counter({
name: 'dyno_app_node_blocked',
help: 'Counts node event loop blocks',
});
const musicPlaylists = new prom.Counter({
name: 'dyno_app_music_playlists',
help: 'Counts music playlists',
});
const musicAdds = new prom.Counter({
name: 'dyno_app_music_adds',
help: 'Counts music adds',
});
const voiceTotals = new prom.Gauge({
name: 'dyno_app_voice_total',
help: 'Voice totals gauge',
labelNames: ['state'],
});
const voicePlaying = new prom.Gauge({
name: 'dyno_app_voice_playing',
help: 'Voice playing gauge',
labelNames: ['state'],
});
// Music module metrics
const musicModuleMetrics = [
new prom.Counter({
name: 'dyno_app_music_total_user_listen_time',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_total_playing_time',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_song_ends',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_partial_song_ends',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_unique_session_joins',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_disconnects',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_joins',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_leaves',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_plays',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_search',
help: 'Music module metrics',
}),
new prom.Counter({
name: 'dyno_app_music_skips',
help: 'Music module metrics',
}),
new prom.Summary({
name: 'dyno_app_music_user_session_summary',
help: 'Music module metrics',
}),
new prom.Summary({
name: 'dyno_app_music_session_summary',
help: 'Music module metrics',
}),
];
}
|
"""
Unit testing for BOM export functionality
"""
import csv
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
class BomExportTest(TestCase):
fixtures = [
'category',
'part',
'location',
'bom',
]
def setUp(self):
super().setUp()
# Create a user
user = get_user_model()
self.user = user.objects.create_user(
username='username',
email='[email protected]',
password='password'
)
# Put the user into a group with the correct permissions
group = Group.objects.create(name='mygroup')
self.user.groups.add(group)
# Give the group *all* the permissions!
for rule in group.rule_sets.all():
rule.can_view = True
rule.can_change = True
rule.can_add = True
rule.can_delete = True
rule.save()
self.client.login(username='username', password='password')
self.url = reverse('bom-download', kwargs={'pk': 100})
def test_bom_template(self):
"""
Test that the BOM template can be downloaded from the server
"""
url = reverse('bom-upload-template')
# Download an XLS template
response = self.client.get(url, data={'format': 'xls'})
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename="InvenTree_BOM_Template.xls"'
)
# Return a simple CSV template
response = self.client.get(url, data={'format': 'csv'})
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers['Content-Disposition'],
'attachment; filename="InvenTree_BOM_Template.csv"'
)
filename = '_tmp.csv'
with open(filename, 'wb') as f:
f.write(response.getvalue())
with open(filename, 'r') as f:
reader = csv.reader(f, delimiter=',')
for line in reader:
headers = line
break
expected = [
'part_id',
'part_ipn',
'part_name',
'quantity',
'optional',
'overage',
'reference',
'note',
'inherited',
'allow_variants',
]
# Ensure all the expected headers are in the provided file
for header in expected:
self.assertTrue(header in headers)
def test_export_csv(self):
"""
Test BOM download in CSV format
"""
params = {
'file_format': 'csv',
'cascade': True,
'parameter_data': True,
'stock_data': True,
'supplier_data': True,
'manufacturer_data': True,
}
response = self.client.get(self.url, data=params)
self.assertEqual(response.status_code, 200)
content = response.headers['Content-Disposition']
self.assertEqual(content, 'attachment; filename="BOB | Bob | A2_BOM.csv"')
filename = '_tmp.csv'
with open(filename, 'wb') as f:
f.write(response.getvalue())
# Read the file
with open(filename, 'r') as f:
reader = csv.reader(f, delimiter=',')
for line in reader:
headers = line
break
expected = [
'level',
'bom_id',
'parent_part_id',
'parent_part_ipn',
'parent_part_name',
'part_id',
'part_ipn',
'part_name',
'part_description',
'sub_assembly',
'quantity',
'optional',
'overage',
'reference',
'note',
'inherited',
'allow_variants',
'Default Location',
'Available Stock',
]
for header in expected:
self.assertTrue(header in headers)
for header in headers:
self.assertTrue(header in expected)
def test_export_xls(self):
"""
Test BOM download in XLS format
"""
params = {
'file_format': 'xls',
'cascade': True,
'parameter_data': True,
'stock_data': True,
'supplier_data': True,
'manufacturer_data': True,
}
response = self.client.get(self.url, data=params)
self.assertEqual(response.status_code, 200)
content = response.headers['Content-Disposition']
self.assertEqual(content, 'attachment; filename="BOB | Bob | A2_BOM.xls"')
def test_export_xlsx(self):
"""
Test BOM download in XLSX format
"""
params = {
'file_format': 'xlsx',
'cascade': True,
'parameter_data': True,
'stock_data': True,
'supplier_data': True,
'manufacturer_data': True,
}
response = self.client.get(self.url, data=params)
self.assertEqual(response.status_code, 200)
def test_export_json(self):
"""
Test BOM download in JSON format
"""
params = {
'file_format': 'json',
'cascade': True,
'parameter_data': True,
'stock_data': True,
'supplier_data': True,
'manufacturer_data': True,
}
response = self.client.get(self.url, data=params)
self.assertEqual(response.status_code, 200)
content = response.headers['Content-Disposition']
self.assertEqual(content, 'attachment; filename="BOB | Bob | A2_BOM.json"')
|
const btn = document.querySelector("#Speak");
const text = document.querySelector("#userText");
var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition
function Random(maximum){
    return Math.floor(Math.random() * maximum);
}
const recognition = new SpeechRecognition();
const greetingCommands = ["Hello", "Hi", "Heya", "Howdy", "Namaste","Hola", "Hey", "Hey there"]
recognition.onstart = () => {
console.log("Please Speak!")
}
recognition.onresult = (event) => {
const crntText = event.resultIndex;
const transcript = event.results[crntText][0].transcript;
console.log(transcript)
    if(greetingCommands.includes(transcript)){
        // Respond with a random greeting from the command list
        responsiveVoice.speak(greetingCommands[Random(greetingCommands.length)] + " User");
    }
}
// const day = new Date();
// var h = day.getHours();
btn.addEventListener("click",() => {
recognition.start();
}) |
#!/usr/bin/env python3
"""Defines the Review class."""
from models.base_model import Base, BaseModel
from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import relationship
class Review(BaseModel, Base):
"""Represents a review for a MySQL database.
Inherits from SQLAlchemy Base and links to the MySQL table reviews.
Attributes:
__tablename__ (str): The name of the MySQL table to store Reviews.
text (sqlalchemy String): The review description.
place_id (sqlalchemy String): The review's place id.
user_id (sqlalchemy String): The review's user id.
"""
__tablename__ = "reviews"
text = Column(String(1024), nullable=False)
place_id = Column(String(60), ForeignKey("places.id"), nullable=False)
user_id = Column(String(60), ForeignKey("users.id"), nullable=False)
|
/**
* DevExtreme (esm/ui/form/components/label.js)
* Version: 21.2.5
* Build date: Mon Jan 17 2022
*
* Copyright (c) 2012 - 2022 Developer Express Inc. ALL RIGHTS RESERVED
* Read about DevExtreme licensing here: https://js.devexpress.com/Licensing/
*/
import _objectWithoutPropertiesLoose from "@babel/runtime/helpers/esm/objectWithoutPropertiesLoose";
var _excluded = ["$FIELD_ITEM_LABEL_CONTENT_CLASS"];
import $ from "../../../core/renderer";
import {
isDefined
} from "../../../core/utils/type";
import {
isEmpty
} from "../../../core/utils/string";
import {
getLabelMarkText
} from "../ui.form.layout_manager.utils";
import {
WIDGET_CLASS,
FIELD_ITEM_LABEL_CONTENT_CLASS,
FIELD_ITEM_LABEL_CLASS
} from "../constants";
export var GET_LABEL_WIDTH_BY_TEXT_CLASS = "dx-layout-manager-hidden-label";
export var FIELD_ITEM_REQUIRED_MARK_CLASS = "dx-field-item-required-mark";
export var FIELD_ITEM_LABEL_LOCATION_CLASS = "dx-field-item-label-location-";
export var FIELD_ITEM_OPTIONAL_MARK_CLASS = "dx-field-item-optional-mark";
export var FIELD_ITEM_LABEL_TEXT_CLASS = "dx-field-item-label-text";
export function renderLabel(_ref) {
var {
text: text,
id: id,
location: location,
alignment: alignment,
labelID: labelID = null,
markOptions: markOptions = {}
} = _ref;
if (!isDefined(text) || text.length <= 0) {
return null
}
return $("<label>").addClass(FIELD_ITEM_LABEL_CLASS + " " + FIELD_ITEM_LABEL_LOCATION_CLASS + location).attr("for", id).attr("id", labelID).css("textAlign", alignment).append($("<span>").addClass(FIELD_ITEM_LABEL_CONTENT_CLASS).append($("<span>").addClass(FIELD_ITEM_LABEL_TEXT_CLASS).text(text), _renderLabelMark(markOptions)))
}
function _renderLabelMark(markOptions) {
var markText = getLabelMarkText(markOptions);
if ("" === markText) {
return null
}
return $("<span>").addClass(markOptions.showRequiredMark ? FIELD_ITEM_REQUIRED_MARK_CLASS : FIELD_ITEM_OPTIONAL_MARK_CLASS).text(markText)
}
export function setLabelWidthByMaxLabelWidth($targetContainer, labelsSelector, labelMarkOptions) {
var FIELD_ITEM_LABEL_CONTENT_CLASS_Selector = "".concat(labelsSelector, " > .").concat(FIELD_ITEM_LABEL_CLASS, ":not(.").concat(FIELD_ITEM_LABEL_LOCATION_CLASS, "top) > .").concat(FIELD_ITEM_LABEL_CONTENT_CLASS);
var $FIELD_ITEM_LABEL_CONTENT_CLASS_Items = $targetContainer.find(FIELD_ITEM_LABEL_CONTENT_CLASS_Selector);
var FIELD_ITEM_LABEL_CONTENT_CLASS_Length = $FIELD_ITEM_LABEL_CONTENT_CLASS_Items.length;
var labelWidth;
var i;
var maxWidth = 0;
for (i = 0; i < FIELD_ITEM_LABEL_CONTENT_CLASS_Length; i++) {
labelWidth = getLabelWidthByInnerHTML({
$FIELD_ITEM_LABEL_CONTENT_CLASS: $FIELD_ITEM_LABEL_CONTENT_CLASS_Items[i],
location: "left",
markOptions: labelMarkOptions
});
if (labelWidth > maxWidth) {
maxWidth = labelWidth
}
}
for (i = 0; i < FIELD_ITEM_LABEL_CONTENT_CLASS_Length; i++) {
$FIELD_ITEM_LABEL_CONTENT_CLASS_Items[i].style.width = maxWidth + "px"
}
}
function getLabelWidthByInnerHTML(options) {
var {
$FIELD_ITEM_LABEL_CONTENT_CLASS: $FIELD_ITEM_LABEL_CONTENT_CLASS
} = options, renderLabelOptions = _objectWithoutPropertiesLoose(options, _excluded);
var $hiddenContainer = $("<div>").addClass(WIDGET_CLASS).addClass(GET_LABEL_WIDTH_BY_TEXT_CLASS).appendTo("body");
renderLabelOptions.text = " ";
var $label = renderLabel(renderLabelOptions).appendTo($hiddenContainer);
var labelTextElement = $label.find("." + FIELD_ITEM_LABEL_TEXT_CLASS)[0];
labelTextElement.innerHTML = getLabelInnerHTML($FIELD_ITEM_LABEL_CONTENT_CLASS);
var result = labelTextElement.offsetWidth;
$hiddenContainer.remove();
return result
}
function getLabelInnerHTML($FIELD_ITEM_LABEL_CONTENT_CLASS) {
var length = $FIELD_ITEM_LABEL_CONTENT_CLASS.children.length;
var child;
var result = "";
var i;
for (i = 0; i < length; i++) {
child = $FIELD_ITEM_LABEL_CONTENT_CLASS.children[i];
result += !isEmpty(child.innerText) ? child.innerText : child.innerHTML
}
return result
}
|
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract class for sequence generators.
Provides a uniform interface for interacting with generators for any model.
"""
import abc
import os
import tempfile
from note_seq.protobuf import generator_pb2
import tensorflow.compat.v1 as tf
class SequenceGeneratorError(Exception): # pylint:disable=g-bad-exception-name
"""Generic exception for sequence generation errors."""
pass
# TODO(adarob): Replace with tf.saver.checkpoint_file_exists when released.
def _checkpoint_file_exists(checkpoint_file_or_prefix):
"""Returns True if checkpoint file or files (for V2) exist."""
return (tf.gfile.Exists(checkpoint_file_or_prefix) or
tf.gfile.Exists(checkpoint_file_or_prefix + '.index'))
class BaseSequenceGenerator(object):
"""Abstract class for generators."""
__metaclass__ = abc.ABCMeta
def __init__(self, model, details, checkpoint, bundle):
"""Constructs a BaseSequenceGenerator.
Args:
model: An instance of BaseModel.
details: A generator_pb2.GeneratorDetails for this generator.
checkpoint: Where to look for the most recent model checkpoint. Either a
directory to be used with tf.train.latest_checkpoint or the path to a
single checkpoint file. Or None if a bundle should be used.
bundle: A generator_pb2.GeneratorBundle object that contains both a
checkpoint and a metagraph. Or None if a checkpoint should be used.
Raises:
SequenceGeneratorError: if neither checkpoint nor bundle is set.
"""
self._model = model
self._details = details
self._checkpoint = checkpoint
self._bundle = bundle
if self._checkpoint is None and self._bundle is None:
raise SequenceGeneratorError(
'Either checkpoint or bundle must be set')
if self._checkpoint is not None and self._bundle is not None:
raise SequenceGeneratorError(
'Checkpoint and bundle cannot both be set')
if self._bundle:
if self._bundle.generator_details.id != self._details.id:
raise SequenceGeneratorError(
'Generator id in bundle (%s) does not match this generator\'s id '
'(%s)' % (self._bundle.generator_details.id,
self._details.id))
self._initialized = False
@property
def details(self):
"""Returns a GeneratorDetails description of this generator."""
return self._details
@property
def bundle_details(self):
"""Returns the BundleDetails or None if checkpoint was used."""
if self._bundle is None:
return None
return self._bundle.bundle_details
@abc.abstractmethod
def _generate(self, input_sequence, generator_options):
"""Implementation for sequence generation based on sequence and options.
    The implementation can assume that initialize() has been called before this
    method is called.
Args:
input_sequence: An input NoteSequence to base the generation on.
generator_options: A GeneratorOptions proto with options to use for
generation.
Returns:
The generated NoteSequence proto.
"""
pass
def initialize(self):
"""Builds the TF graph and loads the checkpoint.
If the graph has already been initialized, this is a no-op.
Raises:
SequenceGeneratorError: If the checkpoint cannot be found.
"""
if self._initialized:
return
# Either self._checkpoint or self._bundle should be set.
# This is enforced by the constructor.
if self._checkpoint is not None:
# Check if the checkpoint file exists.
if not _checkpoint_file_exists(self._checkpoint):
raise SequenceGeneratorError(
'Checkpoint path does not exist: %s' % (self._checkpoint))
checkpoint_file = self._checkpoint
# If this is a directory, try to determine the latest checkpoint in it.
if tf.gfile.IsDirectory(checkpoint_file):
checkpoint_file = tf.train.latest_checkpoint(checkpoint_file)
if checkpoint_file is None:
raise SequenceGeneratorError(
'No checkpoint file found in directory: %s' % self._checkpoint)
      if (not _checkpoint_file_exists(checkpoint_file) or
          tf.gfile.IsDirectory(checkpoint_file)):
raise SequenceGeneratorError(
'Checkpoint path is not a file: %s (supplied path: %s)' % (
checkpoint_file, self._checkpoint))
self._model.initialize_with_checkpoint(checkpoint_file)
else:
# Write checkpoint and metagraph files to a temp dir.
tempdir = None
try:
tempdir = tempfile.mkdtemp()
checkpoint_filename = os.path.join(tempdir, 'model.ckpt')
with tf.gfile.Open(checkpoint_filename, 'wb') as f:
# For now, we support only 1 checkpoint file.
# If needed, we can later change this to support sharded checkpoints.
f.write(self._bundle.checkpoint_file[0])
metagraph_filename = os.path.join(tempdir, 'model.ckpt.meta')
with tf.gfile.Open(metagraph_filename, 'wb') as f:
f.write(self._bundle.metagraph_file)
self._model.initialize_with_checkpoint_and_metagraph(
checkpoint_filename, metagraph_filename)
finally:
# Clean up the temp dir.
if tempdir is not None:
tf.gfile.DeleteRecursively(tempdir)
self._initialized = True
def close(self):
"""Closes the TF session.
If the session was already closed, this is a no-op.
"""
if self._initialized:
self._model.close()
self._initialized = False
def __enter__(self):
"""When used as a context manager, initializes the TF session."""
self.initialize()
return self
def __exit__(self, *args):
"""When used as a context manager, closes the TF session."""
self.close()
def generate(self, input_sequence, generator_options):
"""Generates a sequence from the model based on sequence and options.
Also initializes the TF graph if not yet initialized.
Args:
input_sequence: An input NoteSequence to base the generation on.
generator_options: A GeneratorOptions proto with options to use for
generation.
Returns:
The generated NoteSequence proto.
"""
self.initialize()
return self._generate(input_sequence, generator_options)
def create_bundle_file(self, bundle_file, bundle_description=None):
"""Writes a generator_pb2.GeneratorBundle file in the specified location.
Saves the checkpoint, metagraph, and generator id in one file.
Args:
bundle_file: Location to write the bundle file.
bundle_description: A short, human-readable string description of this
bundle.
Raises:
SequenceGeneratorError: if there is an error creating the bundle file.
"""
if not bundle_file:
raise SequenceGeneratorError('Bundle file location not specified.')
if not self.details.id:
raise SequenceGeneratorError(
'Generator id must be included in GeneratorDetails when creating '
'a bundle file.')
if not self.details.description:
tf.logging.warn('Writing bundle file with no generator description.')
if not bundle_description:
tf.logging.warn('Writing bundle file with no bundle description.')
self.initialize()
tempdir = None
try:
tempdir = tempfile.mkdtemp()
checkpoint_filename = os.path.join(tempdir, 'model.ckpt')
self._model.write_checkpoint_with_metagraph(checkpoint_filename)
if not os.path.isfile(checkpoint_filename):
raise SequenceGeneratorError(
'Could not read checkpoint file: %s' % (checkpoint_filename))
metagraph_filename = checkpoint_filename + '.meta'
if not os.path.isfile(metagraph_filename):
raise SequenceGeneratorError(
'Could not read metagraph file: %s' % (metagraph_filename))
bundle = generator_pb2.GeneratorBundle()
bundle.generator_details.CopyFrom(self.details)
if bundle_description:
bundle.bundle_details.description = bundle_description
with tf.gfile.Open(checkpoint_filename, 'rb') as f:
bundle.checkpoint_file.append(f.read())
with tf.gfile.Open(metagraph_filename, 'rb') as f:
bundle.metagraph_file = f.read()
with tf.gfile.Open(bundle_file, 'wb') as f:
f.write(bundle.SerializeToString())
finally:
if tempdir is not None:
tf.gfile.DeleteRecursively(tempdir)
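# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of this module's API): how a
# concrete subclass is typically driven. `MySequenceGenerator`, `MyModel`,
# `my_details`, and `input_sequence` are hypothetical placeholders; a real
# caller supplies a BaseModel subclass, a GeneratorDetails proto, and an input
# NoteSequence.
#
#   class MySequenceGenerator(BaseSequenceGenerator):
#     def _generate(self, input_sequence, generator_options):
#       ...  # model-specific generation, returns a NoteSequence proto
#
#   generator = MySequenceGenerator(
#       model=MyModel(), details=my_details,
#       checkpoint='/path/to/checkpoint_dir', bundle=None)
#
#   options = generator_pb2.GeneratorOptions()
#   options.generate_sections.add(start_time=0, end_time=30)
#
#   # Used as a context manager, the TF graph is initialized on entry and the
#   # session is closed on exit (see __enter__/__exit__ above).
#   with generator as g:
#     output_sequence = g.generate(input_sequence, options)
# ---------------------------------------------------------------------------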
|
import { Socket } from 'dgram';
export default class InterruptListener extends Socket {
constructor() {
super(...arguments);
}
}
|
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import d3 from 'd3';
import _ from 'lodash';
import { VislibVisualizationsPointSeriesProvider } from './_point_series';
export function VislibVisualizationsLineChartProvider(Private) {
const PointSeries = Private(VislibVisualizationsPointSeriesProvider);
const defaults = {
mode: 'normal',
showCircles: true,
radiusRatio: 9,
showLines: true,
interpolate: 'linear',
lineWidth: 2,
color: undefined,
fillColor: undefined
};
/**
* Line Chart Visualization
*
* @class LineChart
* @constructor
* @extends Chart
* @param handler {Object} Reference to the Handler Class Constructor
* @param el {HTMLElement} HTML element to which the chart will be appended
* @param chartData {Object} Elasticsearch query results for this specific chart
*/
class LineChart extends PointSeries {
constructor(handler, chartEl, chartData, seriesConfigArgs) {
super(handler, chartEl, chartData, seriesConfigArgs);
this.seriesConfig = _.defaults(seriesConfigArgs || {}, defaults);
}
addCircles(svg, data) {
const self = this;
const showCircles = this.seriesConfig.showCircles;
const color = this.handler.data.getColorFunc();
const xScale = this.getCategoryAxis().getScale();
const yScale = this.getValueAxis().getScale();
const ordered = this.handler.data.get('ordered');
const tooltip = this.baseChart.tooltip;
const isTooltip = this.handler.visConfig.get('tooltip.show');
const isHorizontal = this.getCategoryAxis().axisConfig.isHorizontal();
const lineWidth = this.seriesConfig.lineWidth;
const radii = this.baseChart.radii;
const radiusStep = ((radii.max - radii.min) || (radii.max * 100)) / Math.pow(this.seriesConfig.radiusRatio, 2);
const layer = svg.append('g')
.attr('class', 'points line')
.attr('clip-path', 'url(#' + this.baseChart.clipPathId + ')');
const circles = layer
.selectAll('circle')
.data(function appendData() {
return data.values.filter(function (d) {
return !_.isNull(d.y);
});
});
circles
.exit()
.remove();
function cx(d) {
if (ordered && ordered.date) {
return xScale(d.x);
}
return xScale(d.x) + xScale.rangeBand() / 2;
}
function cy(d) {
const y0 = d.y0 || 0;
const y = d.y || 0;
return yScale(y0 + y);
}
function cColor(d) {
return color(d.series);
}
function colorCircle(d) {
const parent = d3.select(this).node().parentNode;
const lengthOfParent = d3.select(parent).data()[0].length;
const isVisible = (lengthOfParent === 1);
// If only 1 point exists, show circle
if (!showCircles && !isVisible) return 'none';
return cColor(d);
}
function getCircleRadiusFn(modifier) {
return function getCircleRadius(d) {
const width = self.baseChart.chartConfig.width;
const height = self.baseChart.chartConfig.height;
const circleRadius = (d.z - radii.min) / radiusStep;
const baseMagicNumber = 2;
const base = circleRadius ? Math.sqrt(circleRadius + baseMagicNumber) + lineWidth : lineWidth;
return _.min([base, width, height]) + (modifier || 0);
};
}
circles
.enter()
.append('circle')
.attr('r', getCircleRadiusFn())
.attr('fill-opacity', (this.seriesConfig.drawLinesBetweenPoints ? 1 : 0.7))
.attr('cx', isHorizontal ? cx : cy)
.attr('cy', isHorizontal ? cy : cx)
.attr('class', 'circle-decoration')
.attr('data-label', data.label)
.attr('fill', colorCircle);
circles
.enter()
.append('circle')
.attr('r', getCircleRadiusFn(10))
.attr('cx', isHorizontal ? cx : cy)
.attr('cy', isHorizontal ? cy : cx)
.attr('fill', 'transparent')
.attr('class', 'circle')
.attr('data-label', data.label)
.attr('stroke', cColor)
.attr('stroke-width', 0);
if (isTooltip) {
circles.call(tooltip.render());
}
return circles;
}
/**
* Adds path to SVG
*
* @method addLines
* @param svg {HTMLElement} SVG to which path are appended
* @param data {Array} Array of object data points
* @returns {D3.UpdateSelection} SVG with paths added
*/
addLine(svg, data) {
const xScale = this.getCategoryAxis().getScale();
const yScale = this.getValueAxis().getScale();
const color = this.handler.data.getColorFunc();
const ordered = this.handler.data.get('ordered');
const lineWidth = this.seriesConfig.lineWidth;
const interpolate = this.seriesConfig.interpolate;
const isHorizontal = this.getCategoryAxis().axisConfig.isHorizontal();
const line = svg.append('g')
.attr('class', 'pathgroup lines')
.attr('clip-path', 'url(#' + this.baseChart.clipPathId + ')');
function cx(d) {
if (ordered && ordered.date) {
return xScale(d.x);
}
return xScale(d.x) + xScale.rangeBand() / 2;
}
function cy(d) {
const y = d.y || 0;
const y0 = d.y0 || 0;
return yScale(y0 + y);
}
line.append('path')
.attr('data-label', data.label)
.attr('d', () => {
const d3Line = d3.svg.line()
.defined(function (d) {
return !_.isNull(d.y);
})
.interpolate(interpolate)
.x(isHorizontal ? cx : cy)
.y(isHorizontal ? cy : cx);
return d3Line(data.values.filter(function (d) {
return !_.isNull(d.y);
}));
})
.attr('fill', 'none')
.attr('stroke', () => {
return color(data.label);
})
.attr('stroke-width', lineWidth);
return line;
}
/**
* Renders d3 visualization
*
* @method draw
* @returns {Function} Creates the line chart
*/
draw() {
const self = this;
return function (selection) {
selection.each(function () {
const svg = self.chartEl.append('g');
svg.data([self.chartData]);
if (self.seriesConfig.drawLinesBetweenPoints) {
self.addLine(svg, self.chartData);
}
const circles = self.addCircles(svg, self.chartData);
self.addCircleEvents(circles);
self.events.emit('rendered', {
chart: self.chartData
});
return svg;
});
};
}
}
return LineChart;
}
|
# Generated by Django 3.2.9 on 2021-11-19 20:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main_app', '0003_alter_golfgroup_members'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='profile_img',
field=models.ImageField(blank=True, default='profile/profile-image-default.jpeg', null=True, upload_to='profile'),
),
]
|
# coding: utf-8
from __future__ import unicode_literals
from core.models import BaseManager, Category, Detail, Subcategory
from . import DETAIL_TYPE
class MaterialTypeManager(BaseManager):
def get_queryset(self):
q = super(MaterialTypeManager, self).get_queryset()
return q.filter(type=DETAIL_TYPE)
class Material(Category):
class Meta:
proxy = True
class MaterialSubcategory(Subcategory):
objects = MaterialTypeManager()
class Meta:
proxy = True
verbose_name = 'Material subcategory'
verbose_name_plural = 'Material subcategories'
def save(self, *args, **kwargs):
self.type = DETAIL_TYPE
super(MaterialSubcategory, self).save(*args, **kwargs)
class MaterialDetail(Detail):
objects = MaterialTypeManager()
class Meta:
proxy = True
verbose_name = 'Material detail'
verbose_name_plural = 'Material details'
def save(self, *args, **kwargs):
self.type = DETAIL_TYPE
super(MaterialDetail, self).save(*args, **kwargs) |
/**
* @module ol/format/KML
*/
import Feature from '../Feature.js';
import {extend, includes} from '../array.js';
import {assert} from '../asserts.js';
import {asArray} from '../color.js';
import {transformGeometryWithOptions} from './Feature.js';
import XMLFeature from './XMLFeature.js';
import {readDecimal, readBoolean, readString, writeStringTextNode, writeCDATASection, writeDecimalTextNode, writeBooleanTextNode} from './xsd.js';
import GeometryCollection from '../geom/GeometryCollection.js';
import GeometryLayout from '../geom/GeometryLayout.js';
import GeometryType from '../geom/GeometryType.js';
import LineString from '../geom/LineString.js';
import MultiLineString from '../geom/MultiLineString.js';
import MultiPoint from '../geom/MultiPoint.js';
import MultiPolygon from '../geom/MultiPolygon.js';
import Point from '../geom/Point.js';
import Polygon from '../geom/Polygon.js';
import {toRadians} from '../math.js';
import {get as getProjection} from '../proj.js';
import Fill from '../style/Fill.js';
import Icon from '../style/Icon.js';
import IconAnchorUnits from '../style/IconAnchorUnits.js';
import IconOrigin from '../style/IconOrigin.js';
import Stroke from '../style/Stroke.js';
import Style from '../style/Style.js';
import Text from '../style/Text.js';
import {createElementNS, getAllTextContent, isDocument, makeArrayExtender,
makeArrayPusher, makeChildAppender, makeObjectPropertySetter,
makeReplacer, makeSequence, makeSimpleNodeFactory, makeStructureNS,
OBJECT_PROPERTY_NODE_FACTORY, parse, parseNode, pushParseAndPop,
pushSerializeAndPop, XML_SCHEMA_INSTANCE_URI} from '../xml.js';
/**
* @typedef {Object} Vec2
* @property {number} x
* @property {IconAnchorUnits} xunits
* @property {number} y
* @property {IconAnchorUnits} yunits
* @property {IconOrigin} origin
*/
/**
* @typedef {Object} GxTrackObject
* @property {Array<number>} flatCoordinates
* @property {Array<number>} whens
*/
/**
* @const
* @type {Array<string>}
*/
const GX_NAMESPACE_URIS = [
'http://www.google.com/kml/ext/2.2'
];
/**
* @const
* @type {Array<null|string>}
*/
const NAMESPACE_URIS = [
null,
'http://earth.google.com/kml/2.0',
'http://earth.google.com/kml/2.1',
'http://earth.google.com/kml/2.2',
'http://www.opengis.net/kml/2.2'
];
/**
* @const
* @type {string}
*/
const SCHEMA_LOCATION = 'http://www.opengis.net/kml/2.2 ' +
'https://developers.google.com/kml/schema/kml22gx.xsd';
/**
* @type {Object<string, IconAnchorUnits>}
*/
const ICON_ANCHOR_UNITS_MAP = {
'fraction': IconAnchorUnits.FRACTION,
'pixels': IconAnchorUnits.PIXELS,
'insetPixels': IconAnchorUnits.PIXELS
};
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const PLACEMARK_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'ExtendedData': extendedDataParser,
'Region': regionParser,
'MultiGeometry': makeObjectPropertySetter(
readMultiGeometry, 'geometry'),
'LineString': makeObjectPropertySetter(
readLineString, 'geometry'),
'LinearRing': makeObjectPropertySetter(
readLinearRing, 'geometry'),
'Point': makeObjectPropertySetter(
readPoint, 'geometry'),
'Polygon': makeObjectPropertySetter(
readPolygon, 'geometry'),
'Style': makeObjectPropertySetter(readStyle),
'StyleMap': placemarkStyleMapParser,
'address': makeObjectPropertySetter(readString),
'description': makeObjectPropertySetter(readString),
'name': makeObjectPropertySetter(readString),
'open': makeObjectPropertySetter(readBoolean),
'phoneNumber': makeObjectPropertySetter(readString),
'styleUrl': makeObjectPropertySetter(readURI),
'visibility': makeObjectPropertySetter(readBoolean)
}, makeStructureNS(
GX_NAMESPACE_URIS, {
'MultiTrack': makeObjectPropertySetter(
readGxMultiTrack, 'geometry'),
'Track': makeObjectPropertySetter(
readGxTrack, 'geometry')
}
));
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const NETWORK_LINK_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'ExtendedData': extendedDataParser,
'Region': regionParser,
'Link': linkParser,
'address': makeObjectPropertySetter(readString),
'description': makeObjectPropertySetter(readString),
'name': makeObjectPropertySetter(readString),
'open': makeObjectPropertySetter(readBoolean),
'phoneNumber': makeObjectPropertySetter(readString),
'visibility': makeObjectPropertySetter(readBoolean)
});
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const LINK_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'href': makeObjectPropertySetter(readURI)
});
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const REGION_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'LatLonAltBox': latLonAltBoxParser,
'Lod': lodParser
});
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const KML_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'Document', 'Placemark'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const KML_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'Document': makeChildAppender(writeDocument),
'Placemark': makeChildAppender(writePlacemark)
});
/**
* @type {import("../color.js").Color}
*/
let DEFAULT_COLOR;
/**
* @type {Fill}
*/
let DEFAULT_FILL_STYLE = null;
/**
* Get the default fill style (or null if not yet set).
* @return {Fill} The default fill style.
*/
export function getDefaultFillStyle() {
return DEFAULT_FILL_STYLE;
}
/**
* @type {import("../size.js").Size}
*/
let DEFAULT_IMAGE_STYLE_ANCHOR;
/**
* @type {IconAnchorUnits}
*/
let DEFAULT_IMAGE_STYLE_ANCHOR_X_UNITS;
/**
* @type {IconAnchorUnits}
*/
let DEFAULT_IMAGE_STYLE_ANCHOR_Y_UNITS;
/**
* @type {import("../size.js").Size}
*/
let DEFAULT_IMAGE_STYLE_SIZE;
/**
* @type {string}
*/
let DEFAULT_IMAGE_STYLE_SRC;
/**
* @type {number}
*/
let DEFAULT_IMAGE_SCALE_MULTIPLIER;
/**
* @type {import("../style/Image.js").default}
*/
let DEFAULT_IMAGE_STYLE = null;
/**
* Get the default image style (or null if not yet set).
* @return {import("../style/Image.js").default} The default image style.
*/
export function getDefaultImageStyle() {
return DEFAULT_IMAGE_STYLE;
}
/**
* @type {string}
*/
let DEFAULT_NO_IMAGE_STYLE;
/**
* @type {Stroke}
*/
let DEFAULT_STROKE_STYLE = null;
/**
* Get the default stroke style (or null if not yet set).
* @return {Stroke} The default stroke style.
*/
export function getDefaultStrokeStyle() {
return DEFAULT_STROKE_STYLE;
}
/**
* @type {Stroke}
*/
let DEFAULT_TEXT_STROKE_STYLE;
/**
* @type {Text}
*/
let DEFAULT_TEXT_STYLE = null;
/**
* Get the default text style (or null if not yet set).
* @return {Text} The default text style.
*/
export function getDefaultTextStyle() {
return DEFAULT_TEXT_STYLE;
}
/**
* @type {Style}
*/
let DEFAULT_STYLE = null;
/**
* Get the default style (or null if not yet set).
* @return {Style} The default style.
*/
export function getDefaultStyle() {
return DEFAULT_STYLE;
}
/**
* @type {Array<Style>}
*/
let DEFAULT_STYLE_ARRAY = null;
/**
* Get the default style array (or null if not yet set).
* @return {Array<Style>} The default style.
*/
export function getDefaultStyleArray() {
return DEFAULT_STYLE_ARRAY;
}
function createStyleDefaults() {
DEFAULT_COLOR = [255, 255, 255, 1];
DEFAULT_FILL_STYLE = new Fill({
color: DEFAULT_COLOR
});
DEFAULT_IMAGE_STYLE_ANCHOR = [20, 2]; // FIXME maybe [8, 32] ?
DEFAULT_IMAGE_STYLE_ANCHOR_X_UNITS = IconAnchorUnits.PIXELS;
DEFAULT_IMAGE_STYLE_ANCHOR_Y_UNITS = IconAnchorUnits.PIXELS;
DEFAULT_IMAGE_STYLE_SIZE = [64, 64];
DEFAULT_IMAGE_STYLE_SRC =
'https://maps.google.com/mapfiles/kml/pushpin/ylw-pushpin.png';
DEFAULT_IMAGE_SCALE_MULTIPLIER = 0.5;
DEFAULT_IMAGE_STYLE = new Icon({
anchor: DEFAULT_IMAGE_STYLE_ANCHOR,
anchorOrigin: IconOrigin.BOTTOM_LEFT,
anchorXUnits: DEFAULT_IMAGE_STYLE_ANCHOR_X_UNITS,
anchorYUnits: DEFAULT_IMAGE_STYLE_ANCHOR_Y_UNITS,
crossOrigin: 'anonymous',
rotation: 0,
scale: DEFAULT_IMAGE_SCALE_MULTIPLIER,
size: DEFAULT_IMAGE_STYLE_SIZE,
src: DEFAULT_IMAGE_STYLE_SRC
});
DEFAULT_NO_IMAGE_STYLE = 'NO_IMAGE';
DEFAULT_STROKE_STYLE = new Stroke({
color: DEFAULT_COLOR,
width: 1
});
DEFAULT_TEXT_STROKE_STYLE = new Stroke({
color: [51, 51, 51, 1],
width: 2
});
DEFAULT_TEXT_STYLE = new Text({
font: 'bold 16px Helvetica',
fill: DEFAULT_FILL_STYLE,
stroke: DEFAULT_TEXT_STROKE_STYLE,
scale: 0.8
});
DEFAULT_STYLE = new Style({
fill: DEFAULT_FILL_STYLE,
image: DEFAULT_IMAGE_STYLE,
text: DEFAULT_TEXT_STYLE,
stroke: DEFAULT_STROKE_STYLE,
zIndex: 0
});
DEFAULT_STYLE_ARRAY = [DEFAULT_STYLE];
}
/**
* @typedef {Object} Options
* @property {boolean} [extractStyles=true] Extract styles from the KML.
* @property {boolean} [showPointNames=true] Show names as labels for placemarks which contain points.
 * @property {Array<Style>} [defaultStyle] Default style. If not provided, the
 * default style is the same as the one used by Google Earth.
* @property {boolean} [writeStyles=true] Write styles into KML.
* @property {null|string} [crossOrigin='anonymous'] The `crossOrigin` attribute for loaded images. Note that you must provide a
* `crossOrigin` value if you want to access pixel data with the Canvas renderer.
*/
/**
* @classdesc
* Feature format for reading and writing data in the KML format.
*
* {@link module:ol/format/KML~KML#readFeature} will read the first feature from
* a KML source.
*
* MultiGeometries are converted into GeometryCollections if they are a mix of
* geometry types, and into MultiPoint/MultiLineString/MultiPolygon if they are
* all of the same type.
*
* Note that the KML format uses the URL() constructor. Older browsers such as IE
* which do not support this will need a URL polyfill to be loaded before use.
*
* @api
*/
class KML extends XMLFeature {
/**
* @param {Options=} opt_options Options.
*/
constructor(opt_options) {
super();
const options = opt_options ? opt_options : {};
if (!DEFAULT_STYLE_ARRAY) {
createStyleDefaults();
}
/**
* @inheritDoc
*/
this.dataProjection = getProjection('EPSG:4326');
/**
* @private
* @type {Array<Style>}
*/
this.defaultStyle_ = options.defaultStyle ?
options.defaultStyle : DEFAULT_STYLE_ARRAY;
/**
* @private
* @type {boolean}
*/
this.extractStyles_ = options.extractStyles !== undefined ?
options.extractStyles : true;
/**
* @private
* @type {boolean}
*/
this.writeStyles_ = options.writeStyles !== undefined ?
options.writeStyles : true;
/**
* @private
* @type {!Object<string, (Array<Style>|string)>}
*/
this.sharedStyles_ = {};
/**
* @private
* @type {boolean}
*/
this.showPointNames_ = options.showPointNames !== undefined ?
options.showPointNames : true;
/**
* @private
* @type {null|string}
*/
this.crossOrigin_ = options.crossOrigin !== undefined ?
options.crossOrigin : 'anonymous';
}
/**
* @param {Node} node Node.
* @param {Array<*>} objectStack Object stack.
* @private
* @return {Array<Feature>|undefined} Features.
*/
readDocumentOrFolder_(node, objectStack) {
// FIXME use scope somehow
const parsersNS = makeStructureNS(
NAMESPACE_URIS, {
'Document': makeArrayExtender(this.readDocumentOrFolder_, this),
'Folder': makeArrayExtender(this.readDocumentOrFolder_, this),
'Placemark': makeArrayPusher(this.readPlacemark_, this),
'Style': this.readSharedStyle_.bind(this),
'StyleMap': this.readSharedStyleMap_.bind(this)
});
/** @type {Array<Feature>} */
// @ts-ignore
const features = pushParseAndPop([], parsersNS, node, objectStack, this);
if (features) {
return features;
} else {
return undefined;
}
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @private
* @return {Feature|undefined} Feature.
*/
readPlacemark_(node, objectStack) {
const object = pushParseAndPop({'geometry': null},
PLACEMARK_PARSERS, node, objectStack, this);
if (!object) {
return undefined;
}
const feature = new Feature();
const id = node.getAttribute('id');
if (id !== null) {
feature.setId(id);
}
const options = /** @type {import("./Feature.js").ReadOptions} */ (objectStack[0]);
const geometry = object['geometry'];
if (geometry) {
transformGeometryWithOptions(geometry, false, options);
}
feature.setGeometry(geometry);
delete object['geometry'];
if (this.extractStyles_) {
const style = object['Style'];
const styleUrl = object['styleUrl'];
const styleFunction = createFeatureStyleFunction(
style, styleUrl, this.defaultStyle_, this.sharedStyles_,
this.showPointNames_);
feature.setStyle(styleFunction);
}
delete object['Style'];
// we do not remove the styleUrl property from the object, so it
// gets stored on feature when setProperties is called
feature.setProperties(object, true);
return feature;
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @private
*/
readSharedStyle_(node, objectStack) {
const id = node.getAttribute('id');
if (id !== null) {
const style = readStyle.call(this, node, objectStack);
if (style) {
let styleUri;
let baseURI = node.baseURI;
if (!baseURI || baseURI == 'about:blank') {
baseURI = window.location.href;
}
if (baseURI) {
const url = new URL('#' + id, baseURI);
styleUri = url.href;
} else {
styleUri = '#' + id;
}
this.sharedStyles_[styleUri] = style;
}
}
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @private
*/
readSharedStyleMap_(node, objectStack) {
const id = node.getAttribute('id');
if (id === null) {
return;
}
const styleMapValue = readStyleMapValue.call(this, node, objectStack);
if (!styleMapValue) {
return;
}
let styleUri;
let baseURI = node.baseURI;
if (!baseURI || baseURI == 'about:blank') {
baseURI = window.location.href;
}
if (baseURI) {
const url = new URL('#' + id, baseURI);
styleUri = url.href;
} else {
styleUri = '#' + id;
}
this.sharedStyles_[styleUri] = styleMapValue;
}
/**
* @inheritDoc
*/
readFeatureFromNode(node, opt_options) {
if (!includes(NAMESPACE_URIS, node.namespaceURI)) {
return null;
}
const feature = this.readPlacemark_(
node, [this.getReadOptions(node, opt_options)]);
if (feature) {
return feature;
} else {
return null;
}
}
/**
* @inheritDoc
*/
readFeaturesFromNode(node, opt_options) {
if (!includes(NAMESPACE_URIS, node.namespaceURI)) {
return [];
}
let features;
const localName = node.localName;
if (localName == 'Document' || localName == 'Folder') {
features = this.readDocumentOrFolder_(
node, [this.getReadOptions(node, opt_options)]);
if (features) {
return features;
} else {
return [];
}
} else if (localName == 'Placemark') {
const feature = this.readPlacemark_(
node, [this.getReadOptions(node, opt_options)]);
if (feature) {
return [feature];
} else {
return [];
}
} else if (localName == 'kml') {
features = [];
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
const fs = this.readFeaturesFromNode(n, opt_options);
if (fs) {
extend(features, fs);
}
}
return features;
} else {
return [];
}
}
/**
* Read the name of the KML.
*
* @param {Document|Element|string} source Source.
* @return {string|undefined} Name.
* @api
*/
readName(source) {
if (!source) {
return undefined;
} else if (typeof source === 'string') {
const doc = parse(source);
return this.readNameFromDocument(doc);
} else if (isDocument(source)) {
return this.readNameFromDocument(/** @type {Document} */ (source));
} else {
return this.readNameFromNode(/** @type {Element} */ (source));
}
}
/**
* @param {Document} doc Document.
* @return {string|undefined} Name.
*/
readNameFromDocument(doc) {
for (let n = /** @type {Node} */ (doc.firstChild); n; n = n.nextSibling) {
if (n.nodeType == Node.ELEMENT_NODE) {
const name = this.readNameFromNode(/** @type {Element} */ (n));
if (name) {
return name;
}
}
}
return undefined;
}
/**
* @param {Element} node Node.
* @return {string|undefined} Name.
*/
readNameFromNode(node) {
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
n.localName == 'name') {
return readString(n);
}
}
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
const localName = n.localName;
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
(localName == 'Document' ||
localName == 'Folder' ||
localName == 'Placemark' ||
localName == 'kml')) {
const name = this.readNameFromNode(n);
if (name) {
return name;
}
}
}
return undefined;
}
/**
* Read the network links of the KML.
*
* @param {Document|Element|string} source Source.
* @return {Array<Object>} Network links.
* @api
*/
readNetworkLinks(source) {
const networkLinks = [];
if (typeof source === 'string') {
const doc = parse(source);
extend(networkLinks, this.readNetworkLinksFromDocument(doc));
} else if (isDocument(source)) {
extend(networkLinks, this.readNetworkLinksFromDocument(
/** @type {Document} */ (source)));
} else {
extend(networkLinks, this.readNetworkLinksFromNode(
/** @type {Element} */ (source)));
}
return networkLinks;
}
/**
* @param {Document} doc Document.
* @return {Array<Object>} Network links.
*/
readNetworkLinksFromDocument(doc) {
const networkLinks = [];
for (let n = /** @type {Node} */ (doc.firstChild); n; n = n.nextSibling) {
if (n.nodeType == Node.ELEMENT_NODE) {
extend(networkLinks, this.readNetworkLinksFromNode(/** @type {Element} */ (n)));
}
}
return networkLinks;
}
/**
* @param {Element} node Node.
* @return {Array<Object>} Network links.
*/
readNetworkLinksFromNode(node) {
const networkLinks = [];
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
n.localName == 'NetworkLink') {
const obj = pushParseAndPop({}, NETWORK_LINK_PARSERS,
n, []);
networkLinks.push(obj);
}
}
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
const localName = n.localName;
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
(localName == 'Document' ||
localName == 'Folder' ||
localName == 'kml')) {
extend(networkLinks, this.readNetworkLinksFromNode(n));
}
}
return networkLinks;
}
/**
* Read the regions of the KML.
*
* @param {Document|Element|string} source Source.
* @return {Array<Object>} Regions.
* @api
*/
readRegion(source) {
const regions = [];
if (typeof source === 'string') {
const doc = parse(source);
extend(regions, this.readRegionFromDocument(doc));
} else if (isDocument(source)) {
extend(regions, this.readRegionFromDocument(
/** @type {Document} */ (source)));
} else {
extend(regions, this.readRegionFromNode(
/** @type {Element} */ (source)));
}
return regions;
}
/**
* @param {Document} doc Document.
* @return {Array<Object>} Region.
*/
readRegionFromDocument(doc) {
const regions = [];
for (let n = /** @type {Node} */ (doc.firstChild); n; n = n.nextSibling) {
if (n.nodeType == Node.ELEMENT_NODE) {
extend(regions, this.readRegionFromNode(/** @type {Element} */ (n)));
}
}
return regions;
}
/**
* @param {Element} node Node.
* @return {Array<Object>} Region.
* @api
*/
readRegionFromNode(node) {
const regions = [];
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
n.localName == 'Region') {
const obj = pushParseAndPop({}, REGION_PARSERS,
n, []);
regions.push(obj);
}
}
for (let n = node.firstElementChild; n; n = n.nextElementSibling) {
const localName = n.localName;
if (includes(NAMESPACE_URIS, n.namespaceURI) &&
(localName == 'Document' ||
localName == 'Folder' ||
localName == 'kml')) {
extend(regions, this.readRegionFromNode(n));
}
}
return regions;
}
/**
* Encode an array of features in the KML format as an XML node. GeometryCollections,
* MultiPoints, MultiLineStrings, and MultiPolygons are output as MultiGeometries.
*
* @param {Array<Feature>} features Features.
* @param {import("./Feature.js").WriteOptions=} opt_options Options.
* @return {Node} Node.
* @override
* @api
*/
writeFeaturesNode(features, opt_options) {
opt_options = this.adaptOptions(opt_options);
const kml = createElementNS(NAMESPACE_URIS[4], 'kml');
const xmlnsUri = 'http://www.w3.org/2000/xmlns/';
kml.setAttributeNS(xmlnsUri, 'xmlns:gx', GX_NAMESPACE_URIS[0]);
kml.setAttributeNS(xmlnsUri, 'xmlns:xsi', XML_SCHEMA_INSTANCE_URI);
kml.setAttributeNS(XML_SCHEMA_INSTANCE_URI, 'xsi:schemaLocation', SCHEMA_LOCATION);
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: kml};
/** @type {!Object<string, (Array<Feature>|Feature|undefined)>} */
const properties = {};
if (features.length > 1) {
properties['Document'] = features;
} else if (features.length == 1) {
properties['Placemark'] = features[0];
}
const orderedKeys = KML_SEQUENCE[kml.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, KML_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, [opt_options], orderedKeys,
this);
return kml;
}
}
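/*
 * Usage sketch (illustrative only): reading and writing features with this
 * format class. `kmlString` and the resulting `features` are hypothetical
 * caller-supplied values, not exports of this module.
 *
 *   import KML from 'ol/format/KML.js';
 *
 *   const format = new KML({extractStyles: true, showPointNames: true});
 *   // Parse placemarks into features, reprojecting from the KML default
 *   // (EPSG:4326) into the projection used by the map view.
 *   const features = format.readFeatures(kmlString, {
 *     featureProjection: 'EPSG:3857'
 *   });
 *   // Serialize features back to a KML document node.
 *   const node = format.writeFeaturesNode(features, {
 *     featureProjection: 'EPSG:3857'
 *   });
 */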
/**
* @param {Style|undefined} foundStyle Style.
* @param {string} name Name.
* @return {Style} style Style.
*/
function createNameStyleFunction(foundStyle, name) {
let textStyle = null;
const textOffset = [0, 0];
let textAlign = 'start';
if (foundStyle.getImage()) {
let imageSize = foundStyle.getImage().getImageSize();
if (imageSize === null) {
imageSize = DEFAULT_IMAGE_STYLE_SIZE;
}
if (imageSize.length == 2) {
const imageScale = foundStyle.getImage().getScale();
// Offset the label to be centered to the right of the icon, if there is
// one.
textOffset[0] = imageScale * imageSize[0] / 2;
textOffset[1] = -imageScale * imageSize[1] / 2;
textAlign = 'left';
}
}
if (foundStyle.getText() !== null) {
// clone the text style, customizing it with name, alignments and offset.
// Note that kml does not support many text options that OpenLayers does (rotation, textBaseline).
const foundText = foundStyle.getText();
textStyle = foundText.clone();
textStyle.setFont(foundText.getFont() || DEFAULT_TEXT_STYLE.getFont());
textStyle.setScale(foundText.getScale() || DEFAULT_TEXT_STYLE.getScale());
textStyle.setFill(foundText.getFill() || DEFAULT_TEXT_STYLE.getFill());
textStyle.setStroke(foundText.getStroke() || DEFAULT_TEXT_STROKE_STYLE);
} else {
textStyle = DEFAULT_TEXT_STYLE.clone();
}
textStyle.setText(name);
textStyle.setOffsetX(textOffset[0]);
textStyle.setOffsetY(textOffset[1]);
textStyle.setTextAlign(textAlign);
const nameStyle = new Style({
text: textStyle
});
return nameStyle;
}
/**
* @param {Array<Style>|undefined} style Style.
* @param {string} styleUrl Style URL.
* @param {Array<Style>} defaultStyle Default style.
* @param {!Object<string, (Array<Style>|string)>} sharedStyles Shared styles.
* @param {boolean|undefined} showPointNames true to show names for point placemarks.
* @return {import("../style/Style.js").StyleFunction} Feature style function.
*/
function createFeatureStyleFunction(style, styleUrl, defaultStyle, sharedStyles, showPointNames) {
return (
/**
* @param {Feature} feature feature.
* @param {number} resolution Resolution.
* @return {Array<Style>} Style.
*/
function(feature, resolution) {
let drawName = showPointNames;
/** @type {Style|undefined} */
let nameStyle;
let name = '';
if (drawName) {
const geometry = feature.getGeometry();
if (geometry) {
drawName = geometry.getType() === GeometryType.POINT;
}
}
if (drawName) {
name = /** @type {string} */ (feature.get('name'));
drawName = drawName && !!name;
}
if (style) {
if (drawName) {
nameStyle = createNameStyleFunction(style[0], name);
return style.concat(nameStyle);
}
return style;
}
if (styleUrl) {
const foundStyle = findStyle(styleUrl, defaultStyle, sharedStyles);
if (drawName) {
nameStyle = createNameStyleFunction(foundStyle[0], name);
return foundStyle.concat(nameStyle);
}
return foundStyle;
}
if (drawName) {
nameStyle = createNameStyleFunction(defaultStyle[0], name);
return defaultStyle.concat(nameStyle);
}
return defaultStyle;
}
);
}
/**
* @param {Array<Style>|string|undefined} styleValue Style value.
* @param {Array<Style>} defaultStyle Default style.
* @param {!Object<string, (Array<Style>|string)>} sharedStyles
* Shared styles.
* @return {Array<Style>} Style.
*/
function findStyle(styleValue, defaultStyle, sharedStyles) {
if (Array.isArray(styleValue)) {
return styleValue;
} else if (typeof styleValue === 'string') {
// KML files in the wild occasionally forget the leading `#` on styleUrls
    // defined in the same document. Add a leading `#` if that allows a style
    // to be found.
if (!(styleValue in sharedStyles) && ('#' + styleValue in sharedStyles)) {
styleValue = '#' + styleValue;
}
return findStyle(sharedStyles[styleValue], defaultStyle, sharedStyles);
} else {
return defaultStyle;
}
}
/**
* @param {Node} node Node.
* @return {import("../color.js").Color|undefined} Color.
*/
function readColor(node) {
const s = getAllTextContent(node, false);
// The KML specification states that colors should not include a leading `#`
// but we tolerate them.
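  // For example, the KML (aabbggrr) string 'ff0000ff' parses to [255, 0, 0, 1],
  // i.e. fully opaque red.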
const m = /^\s*#?\s*([0-9A-Fa-f]{8})\s*$/.exec(s);
if (m) {
const hexColor = m[1];
return [
parseInt(hexColor.substr(6, 2), 16),
parseInt(hexColor.substr(4, 2), 16),
parseInt(hexColor.substr(2, 2), 16),
parseInt(hexColor.substr(0, 2), 16) / 255
];
} else {
return undefined;
}
}
/**
* @param {Node} node Node.
* @return {Array<number>|undefined} Flat coordinates.
*/
export function readFlatCoordinates(node) {
let s = getAllTextContent(node, false);
const flatCoordinates = [];
// The KML specification states that coordinate tuples should not include
// spaces, but we tolerate them.
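  // For example, '-112.2, 36.1 -112.3, 36.2' parses to
  // [-112.2, 36.1, 0, -112.3, 36.2, 0]; a missing z component defaults to 0.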
const re =
/^\s*([+\-]?\d*\.?\d+(?:e[+\-]?\d+)?)\s*,\s*([+\-]?\d*\.?\d+(?:e[+\-]?\d+)?)(?:\s*,\s*([+\-]?\d*\.?\d+(?:e[+\-]?\d+)?))?\s*/i;
let m;
while ((m = re.exec(s))) {
const x = parseFloat(m[1]);
const y = parseFloat(m[2]);
const z = m[3] ? parseFloat(m[3]) : 0;
flatCoordinates.push(x, y, z);
s = s.substr(m[0].length);
}
if (s !== '') {
return undefined;
}
return flatCoordinates;
}
/**
* @param {Node} node Node.
* @return {string} URI.
*/
function readURI(node) {
const s = getAllTextContent(node, false).trim();
let baseURI = node.baseURI;
if (!baseURI || baseURI == 'about:blank') {
baseURI = window.location.href;
}
if (baseURI) {
const url = new URL(s, baseURI);
return url.href;
} else {
return s;
}
}
/**
* @param {Element} node Node.
* @return {Vec2} Vec2.
*/
function readVec2(node) {
const xunits = node.getAttribute('xunits');
const yunits = node.getAttribute('yunits');
let origin;
if (xunits !== 'insetPixels') {
if (yunits !== 'insetPixels') {
origin = IconOrigin.BOTTOM_LEFT;
} else {
origin = IconOrigin.TOP_LEFT;
}
} else {
if (yunits !== 'insetPixels') {
origin = IconOrigin.BOTTOM_RIGHT;
} else {
origin = IconOrigin.TOP_RIGHT;
}
}
return {
x: parseFloat(node.getAttribute('x')),
xunits: ICON_ANCHOR_UNITS_MAP[xunits],
y: parseFloat(node.getAttribute('y')),
yunits: ICON_ANCHOR_UNITS_MAP[yunits],
origin: origin
};
}
/**
* @param {Node} node Node.
* @return {number|undefined} Scale.
*/
function readScale(node) {
return readDecimal(node);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const STYLE_MAP_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'Pair': pairDataParser
});
/**
* @this {KML}
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Array<Style>|string|undefined} StyleMap.
*/
function readStyleMapValue(node, objectStack) {
return pushParseAndPop(undefined,
STYLE_MAP_PARSERS, node, objectStack, this);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const ICON_STYLE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'Icon': makeObjectPropertySetter(readIcon),
'heading': makeObjectPropertySetter(readDecimal),
'hotSpot': makeObjectPropertySetter(readVec2),
'scale': makeObjectPropertySetter(readScale)
});
/**
* @this {KML}
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function iconStyleParser(node, objectStack) {
// FIXME refreshMode
// FIXME refreshInterval
// FIXME viewRefreshTime
// FIXME viewBoundScale
// FIXME viewFormat
// FIXME httpQuery
const object = pushParseAndPop(
{}, ICON_STYLE_PARSERS, node, objectStack);
if (!object) {
return;
}
const styleObject = /** @type {Object} */ (objectStack[objectStack.length - 1]);
const IconObject = 'Icon' in object ? object['Icon'] : {};
const drawIcon = (!('Icon' in object) || Object.keys(IconObject).length > 0);
let src;
const href = /** @type {string|undefined} */
(IconObject['href']);
if (href) {
src = href;
} else if (drawIcon) {
src = DEFAULT_IMAGE_STYLE_SRC;
}
let anchor, anchorXUnits, anchorYUnits;
let anchorOrigin = IconOrigin.BOTTOM_LEFT;
const hotSpot = /** @type {Vec2|undefined} */
(object['hotSpot']);
if (hotSpot) {
anchor = [hotSpot.x, hotSpot.y];
anchorXUnits = hotSpot.xunits;
anchorYUnits = hotSpot.yunits;
anchorOrigin = hotSpot.origin;
} else if (src === DEFAULT_IMAGE_STYLE_SRC) {
anchor = DEFAULT_IMAGE_STYLE_ANCHOR;
anchorXUnits = DEFAULT_IMAGE_STYLE_ANCHOR_X_UNITS;
anchorYUnits = DEFAULT_IMAGE_STYLE_ANCHOR_Y_UNITS;
} else if (/^http:\/\/maps\.(?:google|gstatic)\.com\//.test(src)) {
anchor = [0.5, 0];
anchorXUnits = IconAnchorUnits.FRACTION;
anchorYUnits = IconAnchorUnits.FRACTION;
}
let offset;
const x = /** @type {number|undefined} */
(IconObject['x']);
const y = /** @type {number|undefined} */
(IconObject['y']);
if (x !== undefined && y !== undefined) {
offset = [x, y];
}
let size;
const w = /** @type {number|undefined} */
(IconObject['w']);
const h = /** @type {number|undefined} */
(IconObject['h']);
if (w !== undefined && h !== undefined) {
size = [w, h];
}
let rotation;
const heading = /** @type {number} */
(object['heading']);
if (heading !== undefined) {
rotation = toRadians(heading);
}
let scale = /** @type {number|undefined} */
(object['scale']);
if (drawIcon) {
if (src == DEFAULT_IMAGE_STYLE_SRC) {
size = DEFAULT_IMAGE_STYLE_SIZE;
if (scale === undefined) {
scale = DEFAULT_IMAGE_SCALE_MULTIPLIER;
}
}
const imageStyle = new Icon({
anchor: anchor,
anchorOrigin: anchorOrigin,
anchorXUnits: anchorXUnits,
anchorYUnits: anchorYUnits,
crossOrigin: this.crossOrigin_,
offset: offset,
offsetOrigin: IconOrigin.BOTTOM_LEFT,
rotation: rotation,
scale: scale,
size: size,
src: src
});
styleObject['imageStyle'] = imageStyle;
} else {
// handle the case when we explicitly want to draw no icon.
styleObject['imageStyle'] = DEFAULT_NO_IMAGE_STYLE;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const LABEL_STYLE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeObjectPropertySetter(readColor),
'scale': makeObjectPropertySetter(readScale)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function labelStyleParser(node, objectStack) {
// FIXME colorMode
const object = pushParseAndPop(
{}, LABEL_STYLE_PARSERS, node, objectStack);
if (!object) {
return;
}
const styleObject = objectStack[objectStack.length - 1];
const textStyle = new Text({
fill: new Fill({
color: /** @type {import("../color.js").Color} */
('color' in object ? object['color'] : DEFAULT_COLOR)
}),
scale: /** @type {number|undefined} */
(object['scale'])
});
styleObject['textStyle'] = textStyle;
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const LINE_STYLE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeObjectPropertySetter(readColor),
'width': makeObjectPropertySetter(readDecimal)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function lineStyleParser(node, objectStack) {
// FIXME colorMode
// FIXME gx:outerColor
// FIXME gx:outerWidth
// FIXME gx:physicalWidth
// FIXME gx:labelVisibility
const object = pushParseAndPop(
{}, LINE_STYLE_PARSERS, node, objectStack);
if (!object) {
return;
}
const styleObject = objectStack[objectStack.length - 1];
const strokeStyle = new Stroke({
color: /** @type {import("../color.js").Color} */
('color' in object ? object['color'] : DEFAULT_COLOR),
width: /** @type {number} */ ('width' in object ? object['width'] : 1)
});
styleObject['strokeStyle'] = strokeStyle;
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const POLY_STYLE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeObjectPropertySetter(readColor),
'fill': makeObjectPropertySetter(readBoolean),
'outline': makeObjectPropertySetter(readBoolean)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function polyStyleParser(node, objectStack) {
// FIXME colorMode
const object = pushParseAndPop(
{}, POLY_STYLE_PARSERS, node, objectStack);
if (!object) {
return;
}
const styleObject = objectStack[objectStack.length - 1];
const fillStyle = new Fill({
color: /** @type {import("../color.js").Color} */
('color' in object ? object['color'] : DEFAULT_COLOR)
});
styleObject['fillStyle'] = fillStyle;
const fill = /** @type {boolean|undefined} */ (object['fill']);
if (fill !== undefined) {
styleObject['fill'] = fill;
}
const outline = /** @type {boolean|undefined} */ (object['outline']);
if (outline !== undefined) {
styleObject['outline'] = outline;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const FLAT_LINEAR_RING_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'coordinates': makeReplacer(readFlatCoordinates)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Array<number>} LinearRing flat coordinates.
*/
function readFlatLinearRing(node, objectStack) {
return pushParseAndPop(null,
FLAT_LINEAR_RING_PARSERS, node, objectStack);
}
/**
* @param {Node} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function gxCoordParser(node, objectStack) {
const gxTrackObject = /** @type {GxTrackObject} */
(objectStack[objectStack.length - 1]);
const flatCoordinates = gxTrackObject.flatCoordinates;
const s = getAllTextContent(node, false);
const re =
/^\s*([+\-]?\d+(?:\.\d*)?(?:e[+\-]?\d*)?)\s+([+\-]?\d+(?:\.\d*)?(?:e[+\-]?\d*)?)\s+([+\-]?\d+(?:\.\d*)?(?:e[+\-]?\d*)?)\s*$/i;
const m = re.exec(s);
if (m) {
const x = parseFloat(m[1]);
const y = parseFloat(m[2]);
const z = parseFloat(m[3]);
flatCoordinates.push(x, y, z, 0);
} else {
flatCoordinates.push(0, 0, 0, 0);
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const GX_MULTITRACK_GEOMETRY_PARSERS = makeStructureNS(
GX_NAMESPACE_URIS, {
'Track': makeArrayPusher(readGxTrack)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {MultiLineString|undefined} MultiLineString.
*/
function readGxMultiTrack(node, objectStack) {
const lineStrings = pushParseAndPop([],
GX_MULTITRACK_GEOMETRY_PARSERS, node, objectStack);
if (!lineStrings) {
return undefined;
}
return new MultiLineString(lineStrings);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const GX_TRACK_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'when': whenParser
}, makeStructureNS(
GX_NAMESPACE_URIS, {
'coord': gxCoordParser
}));
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {LineString|undefined} LineString.
*/
function readGxTrack(node, objectStack) {
const gxTrackObject = pushParseAndPop(
/** @type {GxTrackObject} */ ({
flatCoordinates: [],
whens: []
}), GX_TRACK_PARSERS, node, objectStack);
if (!gxTrackObject) {
return undefined;
}
const flatCoordinates = gxTrackObject.flatCoordinates;
const whens = gxTrackObject.whens;
for (let i = 0, ii = Math.min(flatCoordinates.length, whens.length); i < ii; ++i) {
flatCoordinates[4 * i + 3] = whens[i];
}
return new LineString(flatCoordinates, GeometryLayout.XYZM);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const ICON_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'href': makeObjectPropertySetter(readURI)
}, makeStructureNS(
GX_NAMESPACE_URIS, {
'x': makeObjectPropertySetter(readDecimal),
'y': makeObjectPropertySetter(readDecimal),
'w': makeObjectPropertySetter(readDecimal),
'h': makeObjectPropertySetter(readDecimal)
}));
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Object} Icon object.
*/
function readIcon(node, objectStack) {
const iconObject = pushParseAndPop(
{}, ICON_PARSERS, node, objectStack);
if (iconObject) {
return iconObject;
} else {
return null;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const GEOMETRY_FLAT_COORDINATES_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'coordinates': makeReplacer(readFlatCoordinates)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Array<number>} Flat coordinates.
*/
function readFlatCoordinatesFromNode(node, objectStack) {
return pushParseAndPop(null,
GEOMETRY_FLAT_COORDINATES_PARSERS, node, objectStack);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const EXTRUDE_AND_ALTITUDE_MODE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'extrude': makeObjectPropertySetter(readBoolean),
'tessellate': makeObjectPropertySetter(readBoolean),
'altitudeMode': makeObjectPropertySetter(readString)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {LineString|undefined} LineString.
*/
function readLineString(node, objectStack) {
const properties = pushParseAndPop({},
EXTRUDE_AND_ALTITUDE_MODE_PARSERS, node,
objectStack);
const flatCoordinates =
readFlatCoordinatesFromNode(node, objectStack);
if (flatCoordinates) {
const lineString = new LineString(flatCoordinates, GeometryLayout.XYZ);
lineString.setProperties(properties, true);
return lineString;
} else {
return undefined;
}
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Polygon|undefined} Polygon.
*/
function readLinearRing(node, objectStack) {
const properties = pushParseAndPop({},
EXTRUDE_AND_ALTITUDE_MODE_PARSERS, node,
objectStack);
const flatCoordinates =
readFlatCoordinatesFromNode(node, objectStack);
if (flatCoordinates) {
const polygon = new Polygon(flatCoordinates, GeometryLayout.XYZ, [flatCoordinates.length]);
polygon.setProperties(properties, true);
return polygon;
} else {
return undefined;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const MULTI_GEOMETRY_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'LineString': makeArrayPusher(readLineString),
'LinearRing': makeArrayPusher(readLinearRing),
'MultiGeometry': makeArrayPusher(readMultiGeometry),
'Point': makeArrayPusher(readPoint),
'Polygon': makeArrayPusher(readPolygon)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {import("../geom/Geometry.js").default} Geometry.
*/
function readMultiGeometry(node, objectStack) {
const geometries = pushParseAndPop([],
MULTI_GEOMETRY_PARSERS, node, objectStack);
if (!geometries) {
return null;
}
if (geometries.length === 0) {
return new GeometryCollection(geometries);
}
let multiGeometry;
let homogeneous = true;
const type = geometries[0].getType();
let geometry;
for (let i = 1, ii = geometries.length; i < ii; ++i) {
geometry = geometries[i];
if (geometry.getType() != type) {
homogeneous = false;
break;
}
}
if (homogeneous) {
let layout;
let flatCoordinates;
if (type == GeometryType.POINT) {
const point = geometries[0];
layout = point.getLayout();
flatCoordinates = point.getFlatCoordinates();
for (let i = 1, ii = geometries.length; i < ii; ++i) {
geometry = geometries[i];
extend(flatCoordinates, geometry.getFlatCoordinates());
}
multiGeometry = new MultiPoint(flatCoordinates, layout);
setCommonGeometryProperties(multiGeometry, geometries);
} else if (type == GeometryType.LINE_STRING) {
multiGeometry = new MultiLineString(geometries);
setCommonGeometryProperties(multiGeometry, geometries);
} else if (type == GeometryType.POLYGON) {
multiGeometry = new MultiPolygon(geometries);
setCommonGeometryProperties(multiGeometry, geometries);
} else if (type == GeometryType.GEOMETRY_COLLECTION) {
multiGeometry = new GeometryCollection(geometries);
} else {
assert(false, 37); // Unknown geometry type found
}
} else {
multiGeometry = new GeometryCollection(geometries);
}
return (
/** @type {import("../geom/Geometry.js").default} */ (multiGeometry)
);
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Point|undefined} Point.
*/
function readPoint(node, objectStack) {
const properties = pushParseAndPop({},
EXTRUDE_AND_ALTITUDE_MODE_PARSERS, node,
objectStack);
const flatCoordinates =
readFlatCoordinatesFromNode(node, objectStack);
if (flatCoordinates) {
const point = new Point(flatCoordinates, GeometryLayout.XYZ);
point.setProperties(properties, true);
return point;
} else {
return undefined;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const FLAT_LINEAR_RINGS_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'innerBoundaryIs': innerBoundaryIsParser,
'outerBoundaryIs': outerBoundaryIsParser
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Polygon|undefined} Polygon.
*/
function readPolygon(node, objectStack) {
const properties = pushParseAndPop(/** @type {Object<string,*>} */ ({}),
EXTRUDE_AND_ALTITUDE_MODE_PARSERS, node,
objectStack);
const flatLinearRings = pushParseAndPop([null],
FLAT_LINEAR_RINGS_PARSERS, node, objectStack);
if (flatLinearRings && flatLinearRings[0]) {
const flatCoordinates = flatLinearRings[0];
const ends = [flatCoordinates.length];
for (let i = 1, ii = flatLinearRings.length; i < ii; ++i) {
extend(flatCoordinates, flatLinearRings[i]);
ends.push(flatCoordinates.length);
}
const polygon = new Polygon(flatCoordinates, GeometryLayout.XYZ, ends);
polygon.setProperties(properties, true);
return polygon;
} else {
return undefined;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const STYLE_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'IconStyle': iconStyleParser,
'LabelStyle': labelStyleParser,
'LineStyle': lineStyleParser,
'PolyStyle': polyStyleParser
});
/**
* @this {KML}
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
* @return {Array<Style>} Style.
*/
function readStyle(node, objectStack) {
const styleObject = pushParseAndPop(
{}, STYLE_PARSERS, node, objectStack, this);
if (!styleObject) {
return null;
}
let fillStyle = /** @type {Fill} */
('fillStyle' in styleObject ?
styleObject['fillStyle'] : DEFAULT_FILL_STYLE);
const fill = /** @type {boolean|undefined} */ (styleObject['fill']);
if (fill !== undefined && !fill) {
fillStyle = null;
}
let imageStyle;
if ('imageStyle' in styleObject) {
if (styleObject['imageStyle'] != DEFAULT_NO_IMAGE_STYLE) {
imageStyle = styleObject['imageStyle'];
}
} else {
imageStyle = DEFAULT_IMAGE_STYLE;
}
const textStyle = /** @type {Text} */
('textStyle' in styleObject ?
styleObject['textStyle'] : DEFAULT_TEXT_STYLE);
let strokeStyle = /** @type {Stroke} */
('strokeStyle' in styleObject ?
styleObject['strokeStyle'] : DEFAULT_STROKE_STYLE);
const outline = /** @type {boolean|undefined} */
(styleObject['outline']);
if (outline !== undefined && !outline) {
strokeStyle = null;
}
return [new Style({
fill: fillStyle,
image: imageStyle,
stroke: strokeStyle,
text: textStyle,
zIndex: undefined // FIXME
})];
}
/**
* Reads an array of geometries and creates arrays for common geometry
* properties. Then sets them to the multi geometry.
* @param {MultiPoint|MultiLineString|MultiPolygon} multiGeometry A multi-geometry.
* @param {Array<import("../geom/Geometry.js").default>} geometries List of geometries.
*/
function setCommonGeometryProperties(multiGeometry, geometries) {
const ii = geometries.length;
const extrudes = new Array(geometries.length);
const tessellates = new Array(geometries.length);
const altitudeModes = new Array(geometries.length);
let hasExtrude, hasTessellate, hasAltitudeMode;
hasExtrude = false;
hasTessellate = false;
hasAltitudeMode = false;
for (let i = 0; i < ii; ++i) {
const geometry = geometries[i];
extrudes[i] = geometry.get('extrude');
tessellates[i] = geometry.get('tessellate');
altitudeModes[i] = geometry.get('altitudeMode');
hasExtrude = hasExtrude || extrudes[i] !== undefined;
hasTessellate = hasTessellate || tessellates[i] !== undefined;
hasAltitudeMode = hasAltitudeMode || altitudeModes[i];
}
if (hasExtrude) {
multiGeometry.set('extrude', extrudes);
}
if (hasTessellate) {
multiGeometry.set('tessellate', tessellates);
}
if (hasAltitudeMode) {
multiGeometry.set('altitudeMode', altitudeModes);
}
}
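// Example (illustrative, not part of the original source): merging two points
// whose 'extrude' properties are true and undefined yields a MultiPoint with
// 'extrude' set to [true, undefined]; a property that is undefined on every
// member geometry is not set on the multi geometry at all.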
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const DATA_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'displayName': makeObjectPropertySetter(readString),
'value': makeObjectPropertySetter(readString)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function dataParser(node, objectStack) {
const name = node.getAttribute('name');
parseNode(DATA_PARSERS, node, objectStack);
const featureObject = /** @type {Object} */ (objectStack[objectStack.length - 1]);
if (name && featureObject.displayName) {
featureObject[name] = {
value: featureObject.value,
displayName: featureObject.displayName,
toString: function() {
return featureObject.value;
}
};
} else if (name !== null) {
featureObject[name] = featureObject.value;
} else if (featureObject.displayName !== null) {
featureObject[featureObject.displayName] = featureObject.value;
}
delete featureObject['value'];
}
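// Example (illustrative, not part of the original source): parsing
// <Data name="holeNumber"><value>1</value></Data> sets
// featureObject['holeNumber'] = '1'; when a <displayName> child is present the
// value is wrapped in an object carrying both fields, with toString()
// returning the raw value.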
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const EXTENDED_DATA_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'Data': dataParser,
'SchemaData': schemaDataParser
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function extendedDataParser(node, objectStack) {
parseNode(EXTENDED_DATA_PARSERS, node, objectStack);
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function regionParser(node, objectStack) {
parseNode(REGION_PARSERS, node, objectStack);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const PAIR_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'Style': makeObjectPropertySetter(readStyle),
'key': makeObjectPropertySetter(readString),
'styleUrl': makeObjectPropertySetter(readURI)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function pairDataParser(node, objectStack) {
const pairObject = pushParseAndPop(
{}, PAIR_PARSERS, node, objectStack, this);
if (!pairObject) {
return;
}
const key = /** @type {string|undefined} */
(pairObject['key']);
if (key && key == 'normal') {
const styleUrl = /** @type {string|undefined} */
(pairObject['styleUrl']);
if (styleUrl) {
objectStack[objectStack.length - 1] = styleUrl;
}
const style = /** @type {Style} */
(pairObject['Style']);
if (style) {
objectStack[objectStack.length - 1] = style;
}
}
}
/**
* @this {KML}
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function placemarkStyleMapParser(node, objectStack) {
const styleMapValue = readStyleMapValue.call(this, node, objectStack);
if (!styleMapValue) {
return;
}
const placemarkObject = objectStack[objectStack.length - 1];
if (Array.isArray(styleMapValue)) {
placemarkObject['Style'] = styleMapValue;
} else if (typeof styleMapValue === 'string') {
placemarkObject['styleUrl'] = styleMapValue;
} else {
assert(false, 38); // `styleMapValue` has an unknown type
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const SCHEMA_DATA_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'SimpleData': simpleDataParser
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function schemaDataParser(node, objectStack) {
parseNode(SCHEMA_DATA_PARSERS, node, objectStack);
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function simpleDataParser(node, objectStack) {
const name = node.getAttribute('name');
if (name !== null) {
const data = readString(node);
const featureObject = /** @type {Object} */ (objectStack[objectStack.length - 1]);
featureObject[name] = data;
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const LAT_LON_ALT_BOX_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'altitudeMode': makeObjectPropertySetter(readString),
'minAltitude': makeObjectPropertySetter(readDecimal),
'maxAltitude': makeObjectPropertySetter(readDecimal),
'north': makeObjectPropertySetter(readDecimal),
'south': makeObjectPropertySetter(readDecimal),
'east': makeObjectPropertySetter(readDecimal),
'west': makeObjectPropertySetter(readDecimal)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function latLonAltBoxParser(node, objectStack) {
const object = pushParseAndPop({}, LAT_LON_ALT_BOX_PARSERS, node, objectStack);
if (!object) {
return;
}
const regionObject = /** @type {Object} */ (objectStack[objectStack.length - 1]);
const extent = [
parseFloat(object['west']),
parseFloat(object['south']),
parseFloat(object['east']),
parseFloat(object['north'])
];
regionObject['extent'] = extent;
regionObject['altitudeMode'] = object['altitudeMode'];
regionObject['minAltitude'] = parseFloat(object['minAltitude']);
regionObject['maxAltitude'] = parseFloat(object['maxAltitude']);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const LOD_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'minLodPixels': makeObjectPropertySetter(readDecimal),
'maxLodPixels': makeObjectPropertySetter(readDecimal),
'minFadeExtent': makeObjectPropertySetter(readDecimal),
'maxFadeExtent': makeObjectPropertySetter(readDecimal)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function lodParser(node, objectStack) {
const object = pushParseAndPop({}, LOD_PARSERS, node, objectStack);
if (!object) {
return;
}
const lodObject = /** @type {Object} */ (objectStack[objectStack.length - 1]);
lodObject['minLodPixels'] = parseFloat(object['minLodPixels']);
lodObject['maxLodPixels'] = parseFloat(object['maxLodPixels']);
lodObject['minFadeExtent'] = parseFloat(object['minFadeExtent']);
lodObject['maxFadeExtent'] = parseFloat(object['maxFadeExtent']);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const INNER_BOUNDARY_IS_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'LinearRing': makeReplacer(readFlatLinearRing)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function innerBoundaryIsParser(node, objectStack) {
/** @type {Array<number>|undefined} */
const flatLinearRing = pushParseAndPop(undefined,
INNER_BOUNDARY_IS_PARSERS, node, objectStack);
if (flatLinearRing) {
const flatLinearRings = /** @type {Array<Array<number>>} */
(objectStack[objectStack.length - 1]);
flatLinearRings.push(flatLinearRing);
}
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Parser>>}
*/
// @ts-ignore
const OUTER_BOUNDARY_IS_PARSERS = makeStructureNS(
NAMESPACE_URIS, {
'LinearRing': makeReplacer(readFlatLinearRing)
});
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function outerBoundaryIsParser(node, objectStack) {
/** @type {Array<number>|undefined} */
const flatLinearRing = pushParseAndPop(undefined,
OUTER_BOUNDARY_IS_PARSERS, node, objectStack);
if (flatLinearRing) {
const flatLinearRings = /** @type {Array<Array<number>>} */
(objectStack[objectStack.length - 1]);
flatLinearRings[0] = flatLinearRing;
}
}
/**
* @param {Element} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function linkParser(node, objectStack) {
parseNode(LINK_PARSERS, node, objectStack);
}
/**
* @param {Node} node Node.
* @param {Array<*>} objectStack Object stack.
*/
function whenParser(node, objectStack) {
const gxTrackObject = /** @type {GxTrackObject} */
(objectStack[objectStack.length - 1]);
const whens = gxTrackObject.whens;
const s = getAllTextContent(node, false);
const when = Date.parse(s);
whens.push(isNaN(when) ? 0 : when);
}
/**
* @param {Node} node Node to append a TextNode with the color to.
* @param {import("../color.js").Color|string} color Color.
*/
function writeColorTextNode(node, color) {
const rgba = asArray(color);
const opacity = (rgba.length == 4) ? rgba[3] : 1;
/** @type {Array<string|number>} */
const abgr = [opacity * 255, rgba[2], rgba[1], rgba[0]];
for (let i = 0; i < 4; ++i) {
const hex = Math.floor(/** @type {number} */ (abgr[i])).toString(16);
abgr[i] = (hex.length == 1) ? '0' + hex : hex;
}
writeStringTextNode(node, abgr.join(''));
}
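// Example (illustrative, not part of the original source): a color of
// [32, 64, 128, 1] is written as "ff804020", i.e. KML's aabbggrr hex order.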
/**
* @param {Node} node Node to append a TextNode with the coordinates to.
* @param {Array<number>} coordinates Coordinates.
* @param {Array<*>} objectStack Object stack.
*/
function writeCoordinatesTextNode(node, coordinates, objectStack) {
const context = objectStack[objectStack.length - 1];
const layout = context['layout'];
const stride = context['stride'];
let dimension;
if (layout == GeometryLayout.XY ||
layout == GeometryLayout.XYM) {
dimension = 2;
} else if (layout == GeometryLayout.XYZ ||
layout == GeometryLayout.XYZM) {
dimension = 3;
} else {
assert(false, 34); // Invalid geometry layout
}
const ii = coordinates.length;
let text = '';
if (ii > 0) {
text += coordinates[0];
for (let d = 1; d < dimension; ++d) {
text += ',' + coordinates[d];
}
for (let i = stride; i < ii; i += stride) {
text += ' ' + coordinates[i];
for (let d = 1; d < dimension; ++d) {
text += ',' + coordinates[i + d];
}
}
}
writeStringTextNode(node, text);
}
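// Example (illustrative, not part of the original source): flat coordinates
// [1, 2, 3, 4, 5, 6] with stride 3 and layout XYZ are written as "1,2,3 4,5,6";
// with layout XYM (dimension 2) the M values are dropped and the text becomes
// "1,2 4,5".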
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const EXTENDEDDATA_NODE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'Data': makeChildAppender(writeDataNode),
'value': makeChildAppender(writeDataNodeValue),
'displayName': makeChildAppender(writeDataNodeName)
});
/**
* @param {Element} node Node.
* @param {{name: *, value: *}} pair Name value pair.
* @param {Array<*>} objectStack Object stack.
*/
function writeDataNode(node, pair, objectStack) {
node.setAttribute('name', pair.name);
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const value = pair.value;
if (typeof value == 'object') {
if (value !== null && value.displayName) {
pushSerializeAndPop(context, EXTENDEDDATA_NODE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, [value.displayName], objectStack, ['displayName']);
}
if (value !== null && value.value) {
pushSerializeAndPop(context, EXTENDEDDATA_NODE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, [value.value], objectStack, ['value']);
}
} else {
pushSerializeAndPop(context, EXTENDEDDATA_NODE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, [value], objectStack, ['value']);
}
}
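// Example (illustrative, not part of the original source): the pair
// {name: 'foo', value: 'bar'} serializes to
// <Data name="foo"><value>bar</value></Data>; when value is an object with a
// displayName, a <displayName> CDATA child is written as well.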
/**
 * @param {Node} node Node to append a CDATA Section with the name to.
* @param {string} name DisplayName.
*/
function writeDataNodeName(node, name) {
writeCDATASection(node, name);
}
/**
 * @param {Node} node Node to append a TextNode with the value to.
* @param {string} value Value.
*/
function writeDataNodeValue(node, value) {
writeStringTextNode(node, value);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const DOCUMENT_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'Placemark': makeChildAppender(writePlacemark)
});
/**
* @const
* @param {*} value Value.
* @param {Array<*>} objectStack Object stack.
* @param {string=} opt_nodeName Node name.
* @return {Node|undefined} Node.
*/
const DOCUMENT_NODE_FACTORY = function(value, objectStack, opt_nodeName) {
const parentNode = objectStack[objectStack.length - 1].node;
return createElementNS(parentNode.namespaceURI, 'Placemark');
};
/**
* @param {Node} node Node.
* @param {Array<Feature>} features Features.
* @param {Array<*>} objectStack Object stack.
* @this {KML}
*/
function writeDocument(node, features, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
pushSerializeAndPop(context, DOCUMENT_SERIALIZERS,
DOCUMENT_NODE_FACTORY, features, objectStack, undefined,
this);
}
/**
* A factory for creating Data nodes.
* @const
* @type {function(*, Array<*>): (Node|undefined)}
*/
const DATA_NODE_FACTORY = makeSimpleNodeFactory('Data');
/**
* @param {Node} node Node.
* @param {{names: Array<string>, values: (Array<*>)}} namesAndValues Names and values.
* @param {Array<*>} objectStack Object stack.
*/
function writeExtendedData(node, namesAndValues, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const names = namesAndValues.names;
const values = namesAndValues.values;
const length = names.length;
for (let i = 0; i < length; i++) {
pushSerializeAndPop(context, EXTENDEDDATA_NODE_SERIALIZERS,
DATA_NODE_FACTORY, [{name: names[i], value: values[i]}], objectStack);
}
}
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const ICON_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'href'
],
makeStructureNS(GX_NAMESPACE_URIS, [
'x', 'y', 'w', 'h'
]));
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const ICON_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'href': makeChildAppender(writeStringTextNode)
}, makeStructureNS(
GX_NAMESPACE_URIS, {
'x': makeChildAppender(writeDecimalTextNode),
'y': makeChildAppender(writeDecimalTextNode),
'w': makeChildAppender(writeDecimalTextNode),
'h': makeChildAppender(writeDecimalTextNode)
}));
/**
* @const
* @param {*} value Value.
* @param {Array<*>} objectStack Object stack.
* @param {string=} opt_nodeName Node name.
* @return {Node|undefined} Node.
*/
const GX_NODE_FACTORY = function(value, objectStack, opt_nodeName) {
return createElementNS(GX_NAMESPACE_URIS[0],
'gx:' + opt_nodeName);
};
/**
* @param {Node} node Node.
* @param {Object} icon Icon object.
* @param {Array<*>} objectStack Object stack.
*/
function writeIcon(node, icon, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const parentNode = objectStack[objectStack.length - 1].node;
let orderedKeys = ICON_SEQUENCE[parentNode.namespaceURI];
let values = makeSequence(icon, orderedKeys);
pushSerializeAndPop(context,
ICON_SERIALIZERS, OBJECT_PROPERTY_NODE_FACTORY,
values, objectStack, orderedKeys);
orderedKeys =
ICON_SEQUENCE[GX_NAMESPACE_URIS[0]];
values = makeSequence(icon, orderedKeys);
pushSerializeAndPop(context, ICON_SERIALIZERS,
GX_NODE_FACTORY, values, objectStack, orderedKeys);
}
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const ICON_STYLE_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'scale', 'heading', 'Icon', 'hotSpot'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const ICON_STYLE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'Icon': makeChildAppender(writeIcon),
'heading': makeChildAppender(writeDecimalTextNode),
'hotSpot': makeChildAppender(writeVec2),
'scale': makeChildAppender(writeScaleTextNode)
});
/**
* @param {Node} node Node.
* @param {import("../style/Icon.js").default} style Icon style.
* @param {Array<*>} objectStack Object stack.
*/
function writeIconStyle(node, style, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const /** @type {Object<string, any>} */ properties = {};
const src = style.getSrc();
const size = style.getSize();
const iconImageSize = style.getImageSize();
const iconProperties = {
'href': src
};
if (size) {
iconProperties['w'] = size[0];
iconProperties['h'] = size[1];
const anchor = style.getAnchor(); // top-left
const origin = style.getOrigin(); // top-left
if (origin && iconImageSize && origin[0] !== 0 && origin[1] !== size[1]) {
iconProperties['x'] = origin[0];
iconProperties['y'] = iconImageSize[1] - (origin[1] + size[1]);
}
if (anchor && (anchor[0] !== size[0] / 2 || anchor[1] !== size[1] / 2)) {
const /** @type {Vec2} */ hotSpot = {
x: anchor[0],
xunits: IconAnchorUnits.PIXELS,
y: size[1] - anchor[1],
yunits: IconAnchorUnits.PIXELS
};
properties['hotSpot'] = hotSpot;
}
}
properties['Icon'] = iconProperties;
const scale = style.getScale();
if (scale !== 1) {
properties['scale'] = scale;
}
const rotation = style.getRotation();
if (rotation !== 0) {
properties['heading'] = rotation; // 0-360
}
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys = ICON_STYLE_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, ICON_STYLE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
}
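// Example (illustrative, not part of the original source): a 32x32 icon
// anchored at its bottom-centre pixel [16, 32] is written as hotSpot x="16"
// y="0", because KML measures the hotSpot y offset from the bottom of the image.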
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const LABEL_STYLE_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'color', 'scale'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const LABEL_STYLE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeChildAppender(writeColorTextNode),
'scale': makeChildAppender(writeScaleTextNode)
});
/**
* @param {Node} node Node.
* @param {Text} style style.
* @param {Array<*>} objectStack Object stack.
*/
function writeLabelStyle(node, style, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const properties = {};
const fill = style.getFill();
if (fill) {
properties['color'] = fill.getColor();
}
const scale = style.getScale();
if (scale && scale !== 1) {
properties['scale'] = scale;
}
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys =
LABEL_STYLE_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, LABEL_STYLE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
}
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const LINE_STYLE_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'color', 'width'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const LINE_STYLE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeChildAppender(writeColorTextNode),
'width': makeChildAppender(writeDecimalTextNode)
});
/**
* @param {Node} node Node.
* @param {Stroke} style style.
* @param {Array<*>} objectStack Object stack.
*/
function writeLineStyle(node, style, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const properties = {
'color': style.getColor(),
'width': style.getWidth()
};
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys = LINE_STYLE_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, LINE_STYLE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
}
/**
* @const
* @type {Object<string, string>}
*/
const GEOMETRY_TYPE_TO_NODENAME = {
'Point': 'Point',
'LineString': 'LineString',
'LinearRing': 'LinearRing',
'Polygon': 'Polygon',
'MultiPoint': 'MultiGeometry',
'MultiLineString': 'MultiGeometry',
'MultiPolygon': 'MultiGeometry',
'GeometryCollection': 'MultiGeometry'
};
/**
* @const
* @param {*} value Value.
* @param {Array<*>} objectStack Object stack.
* @param {string=} opt_nodeName Node name.
* @return {Node|undefined} Node.
*/
const GEOMETRY_NODE_FACTORY = function(value, objectStack, opt_nodeName) {
if (value) {
const parentNode = objectStack[objectStack.length - 1].node;
return createElementNS(parentNode.namespaceURI,
GEOMETRY_TYPE_TO_NODENAME[/** @type {import("../geom/Geometry.js").default} */ (value).getType()]);
}
};
/**
* A factory for creating Point nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const POINT_NODE_FACTORY = makeSimpleNodeFactory('Point');
/**
* A factory for creating LineString nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const LINE_STRING_NODE_FACTORY = makeSimpleNodeFactory('LineString');
/**
* A factory for creating LinearRing nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const LINEAR_RING_NODE_FACTORY = makeSimpleNodeFactory('LinearRing');
/**
* A factory for creating Polygon nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const POLYGON_NODE_FACTORY = makeSimpleNodeFactory('Polygon');
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const MULTI_GEOMETRY_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'LineString': makeChildAppender(
writePrimitiveGeometry),
'Point': makeChildAppender(
writePrimitiveGeometry),
'Polygon': makeChildAppender(writePolygon),
'GeometryCollection': makeChildAppender(
writeMultiGeometry)
});
/**
* @param {Node} node Node.
* @param {import("../geom/Geometry.js").default} geometry Geometry.
* @param {Array<*>} objectStack Object stack.
*/
function writeMultiGeometry(node, geometry, objectStack) {
/** @type {import("../xml.js").NodeStackItem} */
const context = {node: node};
const type = geometry.getType();
/** @type {Array<import("../geom/Geometry.js").default>} */
let geometries;
/** @type {function(*, Array<*>, string=): (Node|undefined)} */
let factory;
if (type == GeometryType.GEOMETRY_COLLECTION) {
geometries = /** @type {GeometryCollection} */ (geometry).getGeometries();
factory = GEOMETRY_NODE_FACTORY;
} else if (type == GeometryType.MULTI_POINT) {
geometries = /** @type {MultiPoint} */ (geometry).getPoints();
factory = POINT_NODE_FACTORY;
} else if (type == GeometryType.MULTI_LINE_STRING) {
geometries =
(/** @type {MultiLineString} */ (geometry)).getLineStrings();
factory = LINE_STRING_NODE_FACTORY;
} else if (type == GeometryType.MULTI_POLYGON) {
geometries =
(/** @type {MultiPolygon} */ (geometry)).getPolygons();
factory = POLYGON_NODE_FACTORY;
} else {
assert(false, 39); // Unknown geometry type
}
pushSerializeAndPop(context,
MULTI_GEOMETRY_SERIALIZERS, factory,
geometries, objectStack);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const BOUNDARY_IS_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'LinearRing': makeChildAppender(
writePrimitiveGeometry)
});
/**
* @param {Node} node Node.
* @param {import("../geom/LinearRing.js").default} linearRing Linear ring.
* @param {Array<*>} objectStack Object stack.
*/
function writeBoundaryIs(node, linearRing, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
pushSerializeAndPop(context,
BOUNDARY_IS_SERIALIZERS,
LINEAR_RING_NODE_FACTORY, [linearRing], objectStack);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const PLACEMARK_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'ExtendedData': makeChildAppender(writeExtendedData),
'MultiGeometry': makeChildAppender(writeMultiGeometry),
'LineString': makeChildAppender(writePrimitiveGeometry),
'LinearRing': makeChildAppender(writePrimitiveGeometry),
'Point': makeChildAppender(writePrimitiveGeometry),
'Polygon': makeChildAppender(writePolygon),
'Style': makeChildAppender(writeStyle),
'address': makeChildAppender(writeStringTextNode),
'description': makeChildAppender(writeStringTextNode),
'name': makeChildAppender(writeStringTextNode),
'open': makeChildAppender(writeBooleanTextNode),
'phoneNumber': makeChildAppender(writeStringTextNode),
'styleUrl': makeChildAppender(writeStringTextNode),
'visibility': makeChildAppender(writeBooleanTextNode)
});
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const PLACEMARK_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'name', 'open', 'visibility', 'address', 'phoneNumber', 'description',
'styleUrl', 'Style'
]);
/**
* A factory for creating ExtendedData nodes.
* @const
* @type {function(*, Array<*>): (Node|undefined)}
*/
const EXTENDEDDATA_NODE_FACTORY = makeSimpleNodeFactory('ExtendedData');
/**
 * Note: arbitrary/custom feature properties are serialized as ExtendedData.
* @param {Element} node Node.
* @param {Feature} feature Feature.
* @param {Array<*>} objectStack Object stack.
* @this {KML}
*/
function writePlacemark(node, feature, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
// set id
if (feature.getId()) {
node.setAttribute('id', /** @type {string} */ (feature.getId()));
}
// serialize properties (properties unknown to KML are not serialized)
const properties = feature.getProperties();
// don't export these to ExtendedData
const filter = {'address': 1, 'description': 1, 'name': 1, 'open': 1,
'phoneNumber': 1, 'styleUrl': 1, 'visibility': 1};
filter[feature.getGeometryName()] = 1;
const keys = Object.keys(properties || {}).sort().filter(function(v) {
return !filter[v];
});
const styleFunction = feature.getStyleFunction();
if (styleFunction) {
// FIXME the styles returned by the style function are supposed to be
// resolution-independent here
const styles = styleFunction(feature, 0);
if (styles) {
const style = Array.isArray(styles) ? styles[0] : styles;
if (this.writeStyles_) {
properties['Style'] = style;
}
const textStyle = style.getText();
if (textStyle) {
properties['name'] = textStyle.getText();
}
}
}
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys = PLACEMARK_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, PLACEMARK_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
if (keys.length > 0) {
const sequence = makeSequence(properties, keys);
const namesAndValues = {names: keys, values: sequence};
pushSerializeAndPop(context, PLACEMARK_SERIALIZERS,
EXTENDEDDATA_NODE_FACTORY, [namesAndValues], objectStack);
}
// serialize geometry
const options = /** @type {import("./Feature.js").WriteOptions} */ (objectStack[0]);
let geometry = feature.getGeometry();
if (geometry) {
geometry = transformGeometryWithOptions(geometry, true, options);
}
pushSerializeAndPop(context, PLACEMARK_SERIALIZERS,
GEOMETRY_NODE_FACTORY, [geometry], objectStack);
}
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const PRIMITIVE_GEOMETRY_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'extrude', 'tessellate', 'altitudeMode', 'coordinates'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const PRIMITIVE_GEOMETRY_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'extrude': makeChildAppender(writeBooleanTextNode),
'tessellate': makeChildAppender(writeBooleanTextNode),
'altitudeMode': makeChildAppender(writeStringTextNode),
'coordinates': makeChildAppender(writeCoordinatesTextNode)
});
/**
* @param {Node} node Node.
* @param {import("../geom/SimpleGeometry.js").default} geometry Geometry.
* @param {Array<*>} objectStack Object stack.
*/
function writePrimitiveGeometry(node, geometry, objectStack) {
const flatCoordinates = geometry.getFlatCoordinates();
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
context['layout'] = geometry.getLayout();
context['stride'] = geometry.getStride();
// serialize properties (properties unknown to KML are not serialized)
const properties = geometry.getProperties();
properties.coordinates = flatCoordinates;
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys = PRIMITIVE_GEOMETRY_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, PRIMITIVE_GEOMETRY_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const POLYGON_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'outerBoundaryIs': makeChildAppender(
writeBoundaryIs),
'innerBoundaryIs': makeChildAppender(
writeBoundaryIs)
});
/**
* A factory for creating innerBoundaryIs nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const INNER_BOUNDARY_NODE_FACTORY = makeSimpleNodeFactory('innerBoundaryIs');
/**
* A factory for creating outerBoundaryIs nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const OUTER_BOUNDARY_NODE_FACTORY = makeSimpleNodeFactory('outerBoundaryIs');
/**
* @param {Node} node Node.
* @param {Polygon} polygon Polygon.
* @param {Array<*>} objectStack Object stack.
*/
function writePolygon(node, polygon, objectStack) {
const linearRings = polygon.getLinearRings();
const outerRing = linearRings.shift();
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
// inner rings
pushSerializeAndPop(context,
POLYGON_SERIALIZERS,
INNER_BOUNDARY_NODE_FACTORY,
linearRings, objectStack);
// outer ring
pushSerializeAndPop(context,
POLYGON_SERIALIZERS,
OUTER_BOUNDARY_NODE_FACTORY,
[outerRing], objectStack);
}
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const POLY_STYLE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'color': makeChildAppender(writeColorTextNode)
});
/**
 * A factory for creating color nodes.
* @const
* @type {function(*, Array<*>, string=): (Node|undefined)}
*/
const COLOR_NODE_FACTORY = makeSimpleNodeFactory('color');
/**
* @param {Node} node Node.
* @param {Fill} style Style.
* @param {Array<*>} objectStack Object stack.
*/
function writePolyStyle(node, style, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
pushSerializeAndPop(context, POLY_STYLE_SERIALIZERS,
COLOR_NODE_FACTORY, [style.getColor()], objectStack);
}
/**
* @param {Node} node Node to append a TextNode with the scale to.
* @param {number|undefined} scale Scale.
*/
function writeScaleTextNode(node, scale) {
// the Math is to remove any excess decimals created by float arithmetic
writeDecimalTextNode(node,
Math.round(scale * 1e6) / 1e6);
}
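// Example (illustrative, not part of the original source): a scale of
// 0.30000000000000004 (typical float arithmetic noise) is written as "0.3".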
/**
* @const
* @type {Object<string, Array<string>>}
*/
// @ts-ignore
const STYLE_SEQUENCE = makeStructureNS(
NAMESPACE_URIS, [
'IconStyle', 'LabelStyle', 'LineStyle', 'PolyStyle'
]);
/**
* @const
* @type {Object<string, Object<string, import("../xml.js").Serializer>>}
*/
// @ts-ignore
const STYLE_SERIALIZERS = makeStructureNS(
NAMESPACE_URIS, {
'IconStyle': makeChildAppender(writeIconStyle),
'LabelStyle': makeChildAppender(writeLabelStyle),
'LineStyle': makeChildAppender(writeLineStyle),
'PolyStyle': makeChildAppender(writePolyStyle)
});
/**
* @param {Node} node Node.
* @param {Style} style Style.
* @param {Array<*>} objectStack Object stack.
*/
function writeStyle(node, style, objectStack) {
const /** @type {import("../xml.js").NodeStackItem} */ context = {node: node};
const properties = {};
const fillStyle = style.getFill();
const strokeStyle = style.getStroke();
const imageStyle = style.getImage();
const textStyle = style.getText();
if (imageStyle && typeof /** @type {?} */ (imageStyle).getSrc === 'function') {
properties['IconStyle'] = imageStyle;
}
if (textStyle) {
properties['LabelStyle'] = textStyle;
}
if (strokeStyle) {
properties['LineStyle'] = strokeStyle;
}
if (fillStyle) {
properties['PolyStyle'] = fillStyle;
}
const parentNode = objectStack[objectStack.length - 1].node;
const orderedKeys = STYLE_SEQUENCE[parentNode.namespaceURI];
const values = makeSequence(properties, orderedKeys);
pushSerializeAndPop(context, STYLE_SERIALIZERS,
OBJECT_PROPERTY_NODE_FACTORY, values, objectStack, orderedKeys);
}
/**
* @param {Element} node Node to append a TextNode with the Vec2 to.
* @param {Vec2} vec2 Vec2.
*/
function writeVec2(node, vec2) {
node.setAttribute('x', String(vec2.x));
node.setAttribute('y', String(vec2.y));
node.setAttribute('xunits', vec2.xunits);
node.setAttribute('yunits', vec2.yunits);
}
export default KML;
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var React = _interopRequireWildcard(require("react"));
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/React.createElement("path", {
d: "M10 12c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zM6 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm0 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm12-8c1.1 0 2-.9 2-2s-.9-2-2-2-2 .9-2 2 .9 2 2 2zm-4 8c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2zm-4-4c-1.1 0-2 .9-2 2s.9 2 2 2 2-.9 2-2-.9-2-2-2z"
}), 'GrainSharp');
exports.default = _default; |
# -*- coding: utf-8 -*-
"""
[template]
[Template] classes for Astrodynamic Toolkit
Copyright (c) 2017 - Michael Kessel (mailto: [email protected])
a.k.a. RocketRedNeck, RocketRedNeck.com, RocketRedNeck.net
RocketRedNeck and MIT Licenses
RocketRedNeck hereby grants license for others to copy and modify this source code for
whatever purpose others deem worthy as long as RocketRedNeck is given credit
where credit is due and you leave RocketRedNeck out of it for all other nefarious purposes.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
****************************************************************************************************
"""
|
# -*- coding: utf-8 -*-
"""
Unit tests for Aligned Hierarchies, assemble.py
"""
import unittest
import numpy as np
from repytah.assemble import breakup_overlaps_by_intersect
from repytah.assemble import check_overlaps
from repytah.assemble import __compare_and_cut as compare_and_cut
from repytah.assemble import __num_of_parts as num_of_parts
from repytah.assemble import __inds_to_rows as inds_to_rows
from repytah.assemble import __merge_based_on_length as merge_based_on_length
from repytah.assemble import __merge_rows as merge_rows
from repytah.assemble import hierarchical_structure
class TestAssemble(unittest.TestCase):
def test_breakup_overlaps_by_intersect(self):
"""
        Tests if breakup_overlaps_by_intersect gives the correct output
accessible via a tuple for an example.
"""
input_pattern_obj = np.array([
[1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]
])
bw_vec = np.array([[3],
[5],
[8],
[8]])
thresh_bw = 0
output = breakup_overlaps_by_intersect(input_pattern_obj, bw_vec,
thresh_bw)
expect_output0 = np.array([
[1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]
])
expect_output1 = np.array([[3],
[5]])
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1].tolist(), expect_output1.tolist())
def test_check_overlaps(self):
"""
Tests if check_overlaps gives the correct output with the correct data
type and size for an example case.
"""
input_mat = np.array([[1, 1, 0, 1, 0, 0],
[1, 1, 1, 0, 1, 0],
[0, 1, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[0, 0, 1, 0, 0, 1]])
expect_output = np.array([[0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1],
[0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
output = check_overlaps(input_mat)
self.assertIs(type(output), np.ndarray)
self.assertEqual(np.size(output), np.size(expect_output))
self.assertEqual(output.tolist(), expect_output.tolist())
def test_compare_and_cut(self):
"""
Tests if __compare_and_cut gives the correct output accessible via a
tuple for an example.
"""
red = np.array(
[1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0]
)
red_len = np.array([5])
blue = np.array(
[1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0]
)
blue_len = np.array([3])
output = compare_and_cut(red, red_len, blue, blue_len)
expect_output0 = np.array([
[1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]
])
expect_output1 = np.array([[1],
[1],
[2]])
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1].tolist(), expect_output1.tolist())
def test_num_of_parts_if_statement(self):
"""
Tests if __num_of_parts gives the correct output accessible via a tuple
for an example when the if clause is entered
(i.e. if the input vector has no breaks).
"""
input_vec = np.array([3, 4])
input_start = np.array([0])
input_all_starts = np.array([3, 7, 10])
expect_output0 = np.array([6, 10, 13])
expect_output1 = 2
output = num_of_parts(input_vec, input_start, input_all_starts)
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1], expect_output1)
def test_num_of_parts_else_statement(self):
"""
Tests if __num_of_parts gives the correct output accessible via a tuple
for an example case when the else clause is entered
(i.e. if the input vector has a break).
"""
input_vec = np.array([3, 5])
input_start = np.array([3])
input_all_starts = np.array([3, 7, 10])
expect_output0 = np.array([[3, 7, 10],
[5, 9, 12]])
expect_output1 = np.array([[1],
[1]])
output = num_of_parts(input_vec, input_start, input_all_starts)
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1].tolist(), expect_output1.tolist())
def test_inds_to_rows(self):
"""
Tests if __inds_to_rows gives the correct output with the correct data
type and size for an example case.
"""
start_mat = np.array([0, 1, 6, 7])
row_length = 10
expect_output = np.array([[1, 1, 0, 0, 0, 0, 1, 1, 0, 0]])
output = inds_to_rows(start_mat, row_length)
self.assertIs(type(output), np.ndarray)
self.assertEqual(np.size(output), np.size(expect_output))
self.assertEqual(output.tolist(), expect_output.tolist())
def test_merge_based_on_length(self):
"""
Tests if __merge_based_on_length gives the correct output accessible
via a tuple for an example case.
"""
full_mat = np.array([
[0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0]
])
full_bw = np.array([[2],
[2]])
target_bw = np.array([[2],
[2]])
output = merge_based_on_length(full_mat, full_bw, target_bw)
expect_output0 = np.array([
[1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0]
])
expect_output1 = np.array([2])
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1].tolist(), expect_output1.tolist())
def test_merge_rows(self):
"""
Tests if __merge_rows gives the correct output with the correct data
type and size for an example case.
"""
input_mat = np.array([
[0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0]
])
input_width = np.array([1])
output = merge_rows(input_mat, input_width)
expect_output = np.array([
[1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0]
])
self.assertIs(type(output), np.ndarray)
self.assertEqual(np.size(output), np.size(expect_output))
self.assertEqual(output.tolist(), expect_output.tolist())
def test_hierarchical_structure(self):
"""
Tests if hierarchical_structure gives the correct output accessible via
a tuple for an example case.
"""
input_matrix_no = np.array([
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
input_key_no = np.array([[5],
[10]])
input_sn = 20
output = hierarchical_structure(input_matrix_no, input_key_no,
input_sn)
expect_output0 = np.array([
[1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
])
expect_output1 = np.array([[5]])
expect_output2 = np.array([
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]
])
self.assertIs(type(output), tuple)
self.assertEqual(output[0].tolist(), expect_output0.tolist())
self.assertEqual(output[1].tolist(), expect_output1.tolist())
self.assertEqual(output[2].tolist(), expect_output2.tolist())
def test_hierarchical_structure_equal_with_boolean(self):
"""
        Tests if hierarchical_structure gives the same output for vis=True
        and vis=False, since visualizations are only displayed and do not
        affect the returned values.
"""
input_matrix_no = np.array([
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
])
input_key_no = np.array([[5],
[10]])
input_sn = 20
output_false = hierarchical_structure(input_matrix_no, input_key_no,
input_sn) # default vis=False
output_true = hierarchical_structure(input_matrix_no, input_key_no,
input_sn, vis=True)
self.assertEqual(output_false[0].tolist(), output_true[0].tolist())
self.assertEqual(output_false[1].tolist(), output_true[1].tolist())
self.assertEqual(output_false[2].tolist(), output_true[2].tolist())
self.assertEqual(output_false[3].tolist(), output_true[3].tolist())
if __name__ == '__main__':
unittest.main() |
# Generated by Django 2.2.13 on 2020-06-06 11:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('front', '0003_auto_20200606_1118'),
]
operations = [
migrations.AddField(
model_name='hotel',
name='title_1',
field=models.CharField(default='', max_length=255),
preserve_default=False,
),
migrations.AddField(
model_name='hotel',
name='title_2',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name='hotel',
name='title_3',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
|
/**
* @depends StyleableBase.js
*/
////////////////////////////////////////////////////////
////////////////////ShapeBase///////////////////////////
/**
* @class ShapeBase
* @inherits StyleableBase
*
* Abstract base class for drawing vector shape paths.
* This is used by CanvasElements when drawing their background shape
* and can be assigned to CanvasElement's "BackgroundShape" style.
* When sub-classing, add any necessary styles and implement the drawShape() function.
*
* @constructor ShapeBase
* Creates new ShapeBase instance.
*/
function ShapeBase()
{
ShapeBase.base.prototype.constructor.call(this);
}
//Inherit from StyleableBase
ShapeBase.prototype = Object.create(StyleableBase.prototype);
ShapeBase.prototype.constructor = ShapeBase;
ShapeBase.base = StyleableBase;
////////////Public//////////////////////
/**
* @function drawShape
* Used to draw a sub-path shape path to the supplied Canvas2DContext using the supplied metrics.
* Override this to draw custom shapes. Do *not* call beginPath() as that will destroy previous
* sub-paths and *do not* do any filling or other context calls. Only draw and closePath() the sub-path.
*
* @param ctx Canvas2DContext
* The Canvas2DContext to draw the sub-path on.
*
* @param metrics DrawMetrics
* DrawMetrics object to use as the bounding box for the sub-path.
*/
ShapeBase.prototype.drawShape =
function (ctx, metrics)
{
//Stub for override.
};
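////////////Example/////////////////////
//A minimal sketch, not part of the original file: a hypothetical EllipseShape
//subclass showing how drawShape() can be overridden. The EllipseShape name and
//the DrawMetrics accessors getX()/getY()/getWidth()/getHeight() are assumptions
//for illustration; per the contract above, the override only adds a sub-path
//and closes it, without calling beginPath() or any fill operations.
function EllipseShape()
{
	EllipseShape.base.prototype.constructor.call(this);
}

//Inherit from ShapeBase
EllipseShape.prototype = Object.create(ShapeBase.prototype);
EllipseShape.prototype.constructor = EllipseShape;
EllipseShape.base = ShapeBase;

EllipseShape.prototype.drawShape =
	function (ctx, metrics)
	{
		var x = metrics.getX();
		var y = metrics.getY();
		var w = metrics.getWidth();
		var h = metrics.getHeight();

		//Single elliptical sub-path filling the supplied bounding box.
		ctx.ellipse(x + (w / 2), y + (h / 2), w / 2, h / 2, 0, 0, Math.PI * 2);
		ctx.closePath();
	};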
|
class ConfigMongoose {
constructor() {
this.Model = null;
this.populate = '';
Object.seal(this);
}
}
export default ConfigMongoose; |
"""
Test for the API views.
"""
from django.test import TestCase
from django.test.client import Client
from api.apiconsumer.models import APIConsumer
class NoTokenTest(TestCase):
def setUp(self):
self.client = Client()
def test_no_token_should_403(self):
response = self.client.get('/depts/')
self.assertEqual(response.status_code, 403, response.request)
def test_invalid_token_should_403(self):
response = self.client.get('/depts/', {'token': 'token'})
self.assertEqual(response.status_code, 403, response.request)
class Permission0Test(TestCase):
def setUp(self):
self.consumer = APIConsumer.objects.create(
name='test',
email='[email protected]',
description='',
token='root',
permission_level=0
)
self.client = Client()
def test_valid_token_should_403(self):
response = self.client.get('/depts/', {'token': self.consumer.token})
self.assertEqual(response.status_code, 403, response.request)
class Permission1Test(TestCase):
def setUp(self):
self.consumer = APIConsumer.objects.create(
name='test',
email='[email protected]',
description='',
token='root',
permission_level=1
)
self.client = Client()
def test_valid_token_should_not_403(self):
response = self.client.get('/depts/', {'token': self.consumer.token})
self.assertNotEqual(response.status_code, 403, response.request)
|
import unittest
import numpy
import chainer
from chainer.backends import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
@testing.parameterize(*testing.product({
'in_shape': [(3, 4, 2)],
'axis1': [0],
'axis2': [1],
    'dtype': [numpy.float16, numpy.float32, numpy.float64],
}))
class TestSwapaxes(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(
0.5, 1, self.in_shape).astype(self.dtype)
self.g = numpy.random.uniform(
0.5, 1, self.in_shape).astype(self.dtype)
self.g = self.g.swapaxes(self.axis1, self.axis2)
self.gg = numpy.random.uniform(
0.5, 1, self.in_shape).astype(self.dtype)
def check_forward(self, x_data):
axis1, axis2 = self.axis1, self.axis2
x = chainer.Variable(x_data)
y = functions.swapaxes(x, axis1, axis2)
self.assertEqual(y.data.dtype, self.dtype)
self.assertTrue((self.x.swapaxes(axis1, axis2) ==
cuda.to_cpu(y.data)).all())
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data):
x = chainer.Variable(x_data)
y = functions.swapaxes(x, self.axis1, self.axis2)
y.grad = y.data
y.backward()
testing.assert_allclose(x.data, x.grad, atol=0, rtol=0)
def test_backward_cpu(self):
self.check_backward(self.x)
@attr.gpu
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x))
@condition.retry(3)
def check_double_backward(self, x_data, g_data, gg_data):
def f(x):
y = functions.swapaxes(x, self.axis1, self.axis2)
return y * y
gradient_check.check_double_backward(
f, x_data, g_data, gg_data, dtype=numpy.float64,
atol=5e-2, rtol=5e-3)
def test_double_backward_cpu(self):
self.check_double_backward(self.x, self.g, self.gg)
@attr.gpu
def test_double_backward_gpu(self):
self.check_double_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.g),
cuda.to_gpu(self.gg))
testing.run_module(__name__, __file__)
|
import { POST, GET, DELETE, PUT } from "src/utils/api_calls";
import { ActionType } from "../../actions";
import { toast } from "react-toastify";
export default class CategoryAction {
static GetProviderAllExperiences = (token) => {
return async (dispatch) => {
console.log("get GetProviderAllExperiences data in Action", token);
// dispatch({ type: ActionType.GET_ALL_PROVIDER_EXPERIENCES });
await GET("experience/provider", token).then((data) => {
if (data) {
console.log("GOT THE EXPERIENCES ALL!!", data?.data);
dispatch({
type: ActionType.GET_ALL_PROVIDER_EXPERIENCES_SUCCESS,
payload: data.data,
});
} else {
// dispatch({ type: ActionType.GET_ALL_PROVIDER_EXPERIENCES_FAIL });
}
});
};
};
static GetExperienceById = (id, token) => {
return async (dispatch) => {
console.log("get experience by id data in Action", token);
dispatch({ type: ActionType.GET_PROVIDER_EXPERIENCE });
await GET(`experience/${id}`, token).then((data) => {
if (data) {
console.log("GOT THE EXPERIENCE ID!!------", data.data);
dispatch({
type: ActionType.GET_PROVIDER_EXPERIENCE_SUCCESS,
payload: data.data,
});
} else {
dispatch({ type: ActionType.GET_PROVIDER_EXPERIENCE_FAIL });
}
});
};
};
static AddExperience = (data, token, history) => {
return async (dispatch) => {
console.log("post experience data in Action", data);
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD });
await POST("experience", data, token).then((data) => {
if (data) {
console.log("ADD THE EXPERIENCE", data);
toast("EXPERIENCE ADDED SUCCESSFULLY");
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD_SUCCESS });
history.push("/myexperience");
} else {
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD_FAIL });
}
});
};
};
static CreateSession = (data, token, history) => {
return async (dispatch) => {
console.log("post experience data in Action", data);
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD });
await POST("session", data, token).then((data) => {
if (data) {
toast("EXPERIENCE ADDED SUCCESSFULLY");
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD_SUCCESS });
history.goBack()
// history.push("/myexperience");
} else {
dispatch({ type: ActionType.PROVIDER_EXPERIENCE_ADD_FAIL });
}
});
};
}
static ShowMySessions = (data) => {
return async (dispatch) => {
dispatch({ type: ActionType.GET_MY_EXPERIENCE_SESSION, payload: data });
};
};
static UpdateExperience = (id, data, token, history) => {
return async (dispatch) => {
console.log("update experience data in Action", data);
dispatch({ type: ActionType.EXPERIENCE_UPDATE });
await PUT(`experience/${id}`, data, token).then((data) => {
if (data) {
console.log("UPDATE THE EXPERIENCE", data);
toast("EXPERIENCE UPDATED SUCCESSFULLY");
dispatch({ type: ActionType.EXPERIENCE_UPDATE_SUCCESS });
history.push("/myexperience");
} else {
dispatch({ type: ActionType.EXPERIENCE_UPDATE_FAIL });
}
});
};
};
static CreateExpeirence = (id, data, token, history) => {
return async (dispatch) => {
console.log("update experience data in Action", data);
dispatch({ type: ActionType.EXPERIENCE_UPDATE });
await POST(`experience/${id}`, data, token).then((data) => {
if (data) {
console.log("UPDATE THE EXPERIENCE", data);
toast("EXPERIENCE UPDATED SUCCESSFULLY");
dispatch({ type: ActionType.EXPERIENCE_UPDATE_SUCCESS });
history.push("/myexperience");
} else {
dispatch({ type: ActionType.EXPERIENCE_UPDATE_FAIL });
}
});
};
};
static DeleteExperience = (id, token, history) => {
return async (dispatch) => {
console.log("delete experience data in Action", id);
dispatch({ type: ActionType.EXPERIENCE_DELETE });
await DELETE(`experience/${id}`, token).then((data) => {
if (data) {
console.log("DELETE THE EXPERIENCE", data);
toast("EXPERIENCE DELETED SUCCESSFULLY");
dispatch({ type: ActionType.EXPERIENCE_DELETE_SUCCESS });
} else {
dispatch({ type: ActionType.EXPERIENCE_DELETE_FAIL });
}
});
};
};
}
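// Hypothetical dispatch from a connected component (the store wiring, authToken and
// experienceId variables are illustrative and not defined in this file):
//   dispatch(CategoryAction.GetProviderAllExperiences(authToken));
//   dispatch(CategoryAction.DeleteExperience(experienceId, authToken, history));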
|
import { h } from 'vue'
export default {
name: "TextHThreeFill",
vendor: "Ph",
type: "",
tags: ["text","h","three","fill"],
render() {
return h(
"svg",
{"xmlns":"http://www.w3.org/2000/svg","viewBox":"0 0 256 256","class":"v-icon","fill":"currentColor","data-name":"ph-text-h-three-fill","innerHTML":" <rect width='256' height='256' fill='none'/> <path d='M152,56V176a8,8,0,0,1-16,0V124H48v52a8,8,0,0,1-16,0V56a8,8,0,0,1,16,0v52h88V56a8,8,0,0,1,16,0Zm85.459,94.54492a35.92878,35.92878,0,0,0-11.92578-7.92676l21.01856-30.03906a8,8,0,0,0-6.55469-12.58691h-48a8,8,0,0,0,0,16h32.63867l-19.1875,27.4209A8,8,0,0,0,212.00293,156a20,20,0,1,1-14.1416,34.1416,8.00052,8.00052,0,0,0-11.31445,11.31445,35.99995,35.99995,0,1,0,50.9121-50.91113Z'/>"},
)
}
} |
# Account Management Providers
#
# Copyright (C) 2013-2014 Red Hat, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
#
#
# Authors: Roman Rakus <[email protected]>
# Authors: Jan Safranek <[email protected]>
#
"""
POSIX group information and management.
Usage:
%(cmd)s list [ <group> ...]
%(cmd)s create [--reserved] [--gid=gid] <group>
%(cmd)s delete <group>
%(cmd)s listuser [<group>] ...
%(cmd)s adduser <group> <user> ...
%(cmd)s removeuser <group> <user> ...
Commands:
list List groups. If no groups are given, all are listed.
create Creates a new group.
delete Deletes a group.
listuser List users in a group or in a list of groups.
adduser Adds a user or a list of users to the group.
removeuser Removes a user or a list of users from the group.
Options:
-r, --reserved Create a system group.
-g, --gid=gid GID for a new group.
"""
from lmi.scripts.common import command
from lmi.scripts.common.errors import LmiInvalidOptions
from lmi.scripts.common.errors import LmiFailed
from lmi.scripts.common import get_logger
from lmi.scripts import account
LOG = get_logger(__name__)
class Lister(command.LmiInstanceLister):
PROPERTIES = (
'Name',
('GID', lambda i: i.InstanceID.split(":")[-1])
)
def transform_options(self, options):
"""
Rename 'group' option to 'groups' parameter name for better
readability
"""
options['<groups>'] = options.pop('<group>')
def execute(self, ns, groups):
if groups:
for group in groups:
inst = account.get_group(ns, group)
yield inst
else:
for group in account.list_groups(ns):
yield group
class ListUser(command.LmiLister):
COLUMNS = ('Group', 'Users')
def transform_options(self, options):
"""
Rename 'group' option to 'groups' parameter name for better
readability
"""
options['<groups>'] = options.pop('<group>')
def execute(self, ns, groups):
if not groups:
group_insts = account.list_groups(ns)
else:
group_insts = []
for group in groups:
group_insts.append(account.get_group(ns, group))
for group in group_insts:
users = account.get_users_in_group(ns, group)
yield (group.Name, ', '.join([user.Name for user in users]))
class Create(command.LmiCheckResult):
OPT_NO_UNDERSCORES = True
EXPECT = None
def verify_options(self, opts):
_gid = opts.get('gid', None)
if _gid is not None and not _gid.isdigit():
raise LmiInvalidOptions("Group ID must be a number")
def transform_options(self, options):
"""
Change 'group' list to string
"""
options['<group>'] = options.pop('<group>')[0]
def execute(self, ns, group, reserved=None, gid=None):
account.create_group(ns, group, reserved, gid)
class Delete(command.LmiCheckResult):
EXPECT = None
def transform_options(self, options):
"""
Change 'group' list to string
"""
options['<group>'] = options.pop('<group>')[0]
def execute(self, ns, group):
group = account.get_group(ns, group)
account.delete_group(ns, group)
class Add(command.LmiCheckResult):
EXPECT = None
def transform_options(self, options):
"""
Change 'group' list to string
Rename 'user' to 'users'
"""
options['<group>'] = options.pop('<group>')[0]
options['<users>'] = options.pop('<user>')
def execute(self, ns, group, users):
group_inst = account.get_group(ns, group)
user_insts = []
for user in users:
user_inst = account.get_user(ns, user)
user_insts.append(user_inst)
account.add_to_group(ns, group_inst, user_insts)
class Remove(command.LmiCheckResult):
EXPECT = None
def transform_options(self, options):
"""
Change 'group' list to string
Rename 'user' to 'users'
"""
options['<group>'] = options.pop('<group>')[0]
options['<users>'] = options.pop('<user>')
def execute(self, ns, group, users):
group_inst = account.get_group(ns, group)
user_insts = []
for user in users:
user_inst = account.get_user(ns, user)
user_insts.append(user_inst)
account.remove_from_group(ns, group_inst, user_insts)
Group = command.register_subcommands(
'group', __doc__,
{ 'list' : Lister
, 'create' : Create
, 'delete' : Delete
, 'listuser': ListUser
, 'adduser' : Add
, 'removeuser': Remove
},
)
|
import sys
import re
import traceback
import os
import subprocess
from opsbro.util import lower_dict
from opsbro.collector import Collector
# Defined here because the BSD/darwin branches below reference it to guard
# proc.kill() on Python >= 2.6; sys.version_info[1] is the minor version number.
pythonVersion = sys.version_info
if os.name == 'nt':
import opsbro.misc.wmi as wmi
class Memory(Collector):
def launch(self):
logger = self.logger
# logger.debug('getMemoryUsage: start')
if os.name == 'nt':
data = {}
# get physical available memory
_os = wmi.wmiaccess.get_table_where('Win32_OperatingSystem', {})[0]
data['Memory Total MBytes'] = total_memory = int(_os.TotalVisibleMemorySize) / 1024
counters = [
(r'Memory Available MBytes', r'\Memory\Available MBytes', 0),
(r'swap Input/sec', r'\Memory\Pages Input/sec', 100),
(r'swap % usage', r'\Paging File(*)\% Usage', 0),
(r'swap % usage peak', r'\Paging File(*)\% Usage Peak', 0),
]
for c in counters:
_label = c[0]
_query = c[1]
_delay = c[2]
v = wmi.wmiaccess.get_perf_data(_query, unit='double', delay=_delay)
data[_label] = v
data['Memory Usage %'] = 100 * (total_memory - data['Memory Available MBytes']) / total_memory
return data
# If a Linux-like procfs is present and mounted we use /proc/meminfo; otherwise we fall back to the "native" tools (vmstat and swapinfo)
if sys.platform.startswith('linux'):
# logger.debug('getMemoryUsage: linux2')
try:
with open('/proc/meminfo', 'r') as meminfoProc:
lines = meminfoProc.readlines()
except IOError as e:
logger.error('getMemoryUsage: exception = %s', e)
return False
# logger.debug('getMemoryUsage: open success, parsing')
regexp = re.compile(r'([0-9]+)') # We run this several times so one-time compile now
meminfo = {}
# Loop through and extract the numerical values
for line in lines:
values = line.split(':')
try:
# Picks out the key (values[0]) and makes a list with the value as the meminfo value (values[1])
# We are only interested in the KB data so regexp that out
match = re.search(regexp, values[1])
if match is not None:
meminfo[str(values[0])] = int(match.group(0))
except IndexError:
break
# logger.debug('getMemoryUsage: parsing, looped')
# put all keys in lower case
meminfo = lower_dict(meminfo)
memData = {}
memData['phys_free'] = 0
memData['phys_used'] = 0
memData['cached'] = 0
memData['swap_free'] = 0
memData['swap_used'] = 0
# Phys
try:
# logger.debug('getMemoryUsage: formatting (phys)')
physTotal = meminfo['memtotal']
physFree = meminfo['memfree'] + meminfo['buffers'] + meminfo['cached'] + meminfo['sreclaimable'] # also count io cache and system one (slab)
physUsed = 100 * (physTotal - float(physFree)) / physTotal
# Store the computed values (totals in KB, usage as a percentage)
meminfo['phys_total'] = physTotal
meminfo['phys_free'] = physFree
meminfo['phys_used'] = physUsed
# Stops the agent crashing if one of the meminfo elements isn't set
except IndexError:
logger.error('getMemoryUsage: formatting (phys) IndexError - cached, memtotal or memfree not present')
except KeyError:
logger.error('getMemoryUsage: formatting (phys) KeyError - cached, memtotal or memfree not present')
logger.debug('getMemoryUsage: formatted (phys)')
# Swap
try:
# logger.debug('getMemoryUsage: formatting (swap)')
swapTotal = meminfo['swaptotal']
swapFree = meminfo['swapfree']
if swapTotal == 0:
swapUsed = 0
else:
swapUsed = 100 * (swapTotal - float(swapFree)) / swapTotal
meminfo['swap_free'] = swapFree
meminfo['swap_used'] = swapUsed
# Stops the agent crashing if one of the meminfo elements isn't set
except IndexError:
logger.error('getMemoryUsage: formatting (swap) IndexError - SwapTotal or SwapFree not present')
except KeyError:
logger.error('getMemoryUsage: formatting (swap) KeyError - SwapTotal or SwapFree not present')
logger.debug('getMemoryUsage: formatted (swap), completed, returning')
return meminfo
elif sys.platform.find('freebsd') != -1:
logger.debug('getMemoryUsage: freebsd (native)')
physFree = None
try:
try:
logger.debug('getMemoryUsage: attempting sysinfo')
proc = subprocess.Popen(['sysinfo', '-v', 'mem'], stdout=subprocess.PIPE, close_fds=True)
sysinfo = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
sysinfo = sysinfo.split('\n')
regexp = re.compile(r'([0-9]+)') # We run this several times so one-time compile now
for line in sysinfo:
parts = line.split(' ')
if parts[0] == 'Free':
logger.debug('getMemoryUsage: parsing free')
for part in parts:
match = re.search(regexp, part)
if match is not None:
physFree = match.group(0)
logger.debug('getMemoryUsage: sysinfo: found free %s', physFree)
if parts[0] == 'Active':
logger.debug('getMemoryUsage: parsing used')
for part in parts:
match = re.search(regexp, part)
if match is not None:
physUsed = match.group(0)
logger.debug('getMemoryUsage: sysinfo: found used %s', physUsed)
if parts[0] == 'Cached':
logger.debug('getMemoryUsage: parsing cached')
for part in parts:
match = re.search(regexp, part)
if match is not None:
cached = match.group(0)
logger.debug('getMemoryUsage: sysinfo: found cached %s', cached)
except OSError as e:
logger.debug('getMemoryUsage: sysinfo not available')
except Exception as e:
logger.error('getMemoryUsage: exception = %s', traceback.format_exc())
finally:
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
if physFree is None:
logger.info(
'getMemoryUsage: sysinfo not installed so falling back on sysctl. sysinfo provides more accurate memory info so is recommended. http://www.freshports.org/sysutils/sysinfo')
try:
try:
logger.debug('getMemoryUsage: attempting Popen (sysctl)')
proc = subprocess.Popen(['sysctl', '-n', 'hw.physmem'], stdout=subprocess.PIPE, close_fds=True)
physTotal = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
logger.debug('getMemoryUsage: attempting Popen (vmstat)')
proc = subprocess.Popen(['vmstat', '-H'], stdout=subprocess.PIPE, close_fds=True)
vmstat = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
except Exception as e:
logger.error('getMemoryUsage: exception = %s', traceback.format_exc())
return False
finally:
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
logger.debug('getMemoryUsage: Popen success, parsing')
# First we parse the information about the real memory
lines = vmstat.split('\n')
physParts = lines[2].split(' ')
physMem = []
# We need to loop through and capture the numerical values
# because sometimes there will be strings and spaces
for k, v in enumerate(physParts):
if re.match(r'([0-9]+)', v) is not None:
physMem.append(v)
physTotal = int(physTotal.strip()) / 1024 # physTotal is returned in bytes, but we need KB so we convert it
physFree = int(physMem[4])
physUsed = int(physTotal - physFree)
logger.debug('getMemoryUsage: parsed vmstat')
# Convert everything to MB
physUsed = int(physUsed) / 1024
physFree = int(physFree) / 1024
cached = 'NULL'
#
# Swap memory details
#
logger.debug('getMemoryUsage: attempting Popen (swapinfo)')
try:
try:
proc = subprocess.Popen(['swapinfo', '-k'], stdout=subprocess.PIPE, close_fds=True)
swapinfo = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
except Exception as e:
logger.error('getMemoryUsage: exception = %s', traceback.format_exc())
return False
finally:
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
lines = swapinfo.split('\n')
swapUsed = 0
swapFree = 0
for index in range(1, len(lines)):
swapParts = re.findall(r'(\d+)', lines[index])
if swapParts: # re.findall returns a (possibly empty) list, never None
try:
swapUsed += int(swapParts[len(swapParts) - 3]) / 1024
swapFree += int(swapParts[len(swapParts) - 2]) / 1024
except IndexError as e:
pass
logger.debug('getMemoryUsage: parsed swapinfo, completed, returning')
return {'physUsed': physUsed, 'physFree': physFree, 'swapUsed': swapUsed, 'swapFree': swapFree,
'cached' : cached}
elif sys.platform == 'darwin':
logger.debug('getMemoryUsage: darwin')
try:
try:
logger.debug('getMemoryUsage: attempting Popen (top)')
proc = subprocess.Popen(['top', '-l 1'], stdout=subprocess.PIPE, close_fds=True)
top = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
logger.debug('getMemoryUsage: attempting Popen (sysctl)')
proc = subprocess.Popen(['sysctl', 'vm.swapusage'], stdout=subprocess.PIPE, close_fds=True)
sysctl = proc.communicate()[0]
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
except Exception as e:
logger.error('getMemoryUsage: exception = %s', traceback.format_exc())
return False
finally:
if int(pythonVersion[1]) >= 6:
try:
proc.kill()
except Exception as e:
logger.debug('Process already terminated')
logger.debug('getMemoryUsage: Popen success, parsing')
# Deal with top
lines = top.split('\n')
physParts = re.findall(r'([0-9]\d+)', lines[self.topIndex])
logger.debug('getMemoryUsage: parsed top')
# Deal with sysctl
swapParts = re.findall(r'([0-9]+\.\d+)', sysctl)
logger.debug('getMemoryUsage: parsed sysctl, completed, returning')
return {'physUsed': physParts[3], 'physFree': physParts[4], 'swapUsed': swapParts[1],
'swapFree': swapParts[2], 'cached': 'NULL'}
else:
self.set_not_eligible('This system is not managed by this collector.')
return False
|
import argparse
import base64
import re
import time
import requests
import rsa
class CheckIn(object):
client = requests.Session()
login_url = "https://cloud.189.cn/api/portal/loginUrl.action?" \
"redirectURL=https://cloud.189.cn/web/redirect.html?returnURL=/main.action"
submit_login_url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
sign_url = ("https://api.cloud.189.cn/mkt/userSign.action?rand=%s"
"&clientType=TELEANDROID&version=8.6.3&model=SM-G930K")
def __init__(self, username, password):
self.username = username
self.password = password
def check_in(self):
self.login()
rand = str(round(time.time() * 1000))
url = "https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN&activityId=ACT_SIGNIN"
url2 = "https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN_PHOTOS&activityId=ACT_SIGNIN"
headers = {
"User-Agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G930K Build/NRD90M; wv)"
" AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74"
".0.3729.136 Mobile Safari/537.36 Ecloud/8.6.3 Android/22 clie"
"ntId/355325117317828 clientModel/SM-G930K imsi/46007111431782"
"4 clientChannelId/qq proVersion/1.0.6",
"Referer": "https://m.cloud.189.cn/zhuanti/2016/sign/index.jsp?albumBackupOpened=1",
"Host": "m.cloud.189.cn",
"Accept-Encoding": "gzip, deflate",
}
response = self.client.get(self.sign_url % rand, headers=headers)
net_disk_bonus = response.json()["netdiskBonus"]
if response.json()["isSign"] == "false":
print(f"未签到,签到获得{net_disk_bonus}M空间")
else:
print(f"已经签到过了,签到获得{net_disk_bonus}M空间")
headers = {
"User-Agent": "Mozilla/5.0 (Linux; Android 5.1.1; SM-G930K Build/NRD90M; wv) "
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0"
".3729.136 Mobile Safari/537.36 Ecloud/8.6.3 Android/22 clientI"
"d/355325117317828 clientModel/SM-G930K imsi/460071114317824 cl"
"ientChannelId/qq proVersion/1.0.6",
"Referer": "https://m.cloud.189.cn/zhuanti/2016/sign/index.jsp?albumBackupOpened=1",
"Host": "m.cloud.189.cn",
"Accept-Encoding": "gzip, deflate",
}
response = self.client.get(url, headers=headers)
if "errorCode" in response.text:
print(response.text)
else:
prize_name = (response.json() or {}).get("prizeName")
print(f"抽奖获得{prize_name}")
response = self.client.get(url2, headers=headers)
if "errorCode" in response.text:
print(response.text)
else:
prize_name = (response.json() or {}).get("prizeName")
print(f"抽奖获得{prize_name}")
@staticmethod
def rsa_encode(rsa_key, string):
rsa_key = f"-----BEGIN PUBLIC KEY-----\n{rsa_key}\n-----END PUBLIC KEY-----"
pubkey = rsa.PublicKey.load_pkcs1_openssl_pem(rsa_key.encode())
result = b64_to_hex((base64.b64encode(rsa.encrypt(f"{string}".encode(), pubkey))).decode())
return result
def login(self):
r = self.client.get(self.login_url)
captcha_token = re.findall(r"captchaToken' value='(.+?)'", r.text)[0]
lt = re.findall(r'lt = "(.+?)"', r.text)[0]
return_url = re.findall(r"returnUrl = '(.+?)'", r.text)[0]
param_id = re.findall(r'paramId = "(.+?)"', r.text)[0]
j_rsa_key = re.findall(r'j_rsaKey" value="(\S+)"', r.text, re.M)[0]
self.client.headers.update({"lt": lt})
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
"Referer": "https://open.e.189.cn/",
}
data = {
"appKey": "cloud",
"accountType": "03",
"userName": f"{{RSA}}{self.rsa_encode(j_rsa_key, self.username)}",
"password": f"{{RSA}}{self.rsa_encode(j_rsa_key, self.password)}",
"validateCode": "",
"captchaToken": captcha_token,
"returnUrl": return_url,
"mailSuffix": "@189.cn",
"paramId": param_id,
}
r = self.client.post(self.submit_login_url, data=data, headers=headers, timeout=5)
if r.json()["result"] == 0:
print(r.json()["msg"])
else:
print(r.json()["msg"])
redirect_url = r.json()["toUrl"]
self.client.get(redirect_url)
def _chr(a):
return "0123456789abcdefghijklmnopqrstuvwxyz"[a]
b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def b64_to_hex(a):
d = ""
e = 0
c = 0
for i in range(len(a)):
if list(a)[i] != "=":
v = b64map.index(list(a)[i])
if 0 == e:
e = 1
d += _chr(v >> 2)
c = 3 & v
elif 1 == e:
e = 2
d += _chr(c << 2 | v >> 4)
c = 15 & v
elif 2 == e:
e = 3
d += _chr(c)
d += _chr(v >> 2)
c = 3 & v
else:
e = 0
d += _chr(c << 2 | v >> 4)
d += _chr(15 & v)
if e == 1:
d += _chr(c << 2)
return d
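# Worked example (verified by hand): b64_to_hex("AAEC") returns "000102", the hex
# encoding of the three bytes 0x00 0x01 0x02 that "AAEC" represents in base64.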
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Tianyi Cloud (cloud.189.cn) daily check-in script')
parser.add_argument('--username', type=str, help='account (username)')
parser.add_argument('--password', type=str, help='password')
args = parser.parse_args()
helper = CheckIn(args.username, args.password)
helper.check_in()
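# Typical invocation (the file name and credentials below are placeholders):
#   python checkin_189.py --username 13800000000 --password "your-password"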
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[11],{"8jRI":function(r,t,e){"use strict";var n=new RegExp("%[a-f0-9]{2}","gi"),o=new RegExp("(%[a-f0-9]{2})+","gi");function a(r,t){try{return decodeURIComponent(r.join(""))}catch(o){}if(1===r.length)return r;t=t||1;var e=r.slice(0,t),n=r.slice(t);return Array.prototype.concat.call([],a(e),a(n))}function i(r){try{return decodeURIComponent(r)}catch(o){for(var t=r.match(n),e=1;e<t.length;e++)t=(r=a(t,e).join("")).match(n);return r}}r.exports=function(r){if("string"!=typeof r)throw new TypeError("Expected `encodedURI` to be of type `string`, got `"+typeof r+"`");try{return r=r.replace(/\+/g," "),decodeURIComponent(r)}catch(t){return function(r){for(var e={"%FE%FF":"��","%FF%FE":"��"},n=o.exec(r);n;){try{e[n[0]]=decodeURIComponent(n[0])}catch(t){var a=i(n[0]);a!==n[0]&&(e[n[0]]=a)}n=o.exec(r)}e["%C2"]="�";for(var c=Object.keys(e),u=0;u<c.length;u++){var s=c[u];r=r.replace(new RegExp(s,"g"),e[s])}return r}(r)}}},"8yz6":function(r,t,e){"use strict";r.exports=function(r,t){if("string"!=typeof r||"string"!=typeof t)throw new TypeError("Expected the arguments to be of type `string`");if(""===t)return[r];var e=r.indexOf(t);return-1===e?[r]:[r.slice(0,e),r.slice(e+t.length)]}},Bnag:function(r,t){r.exports=function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}},EbDI:function(r,t){r.exports=function(r){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(r))return Array.from(r)}},Ijbi:function(r,t,e){var n=e("WkPL");r.exports=function(r){if(Array.isArray(r))return n(r)}},J4zp:function(r,t,e){var n=e("wTVA"),o=e("m0LI"),a=e("ZhPi"),i=e("wkBT");r.exports=function(r,t){return n(r)||o(r,t)||a(r,t)||i()}},OcEc:function(r,t,e){"use strict";e.r(t),e.d(t,"default",(function(){return s}));var n=e("q1tI"),o=e.n(n),a=(e("a6qw"),e("VflZ")),i=e("M55E"),c=e.n(i),u=e("cr+I");function s(r){var t=Object(n.useState)(),e=t[0],i=t[1];return Object(n.useEffect)((function(){var t=u.parse(r.location.search).id;fetch("https://api.evangelie.by/publications/"+t).then((function(r){return r.json()})).then((function(r){i(r)}))}),[r.location.search]),o.a.createElement(a.a,null,o.a.createElement("div",{className:"row mt-2"},o.a.createElement("div",{className:"col-12 text-center"},o.a.createElement("h2",{className:"publication-page-title"},null==e?void 0:e.label)),o.a.createElement("div",{className:"col-12"},(null==e?void 0:e.content)&&o.a.createElement("div",{dangerouslySetInnerHTML:{__html:(new c.a.Converter).makeHtml(e.content)}}))))}},Pmem:function(r,t,e){"use strict";r.exports=function(r){return encodeURIComponent(r).replace(/[!'()*]/g,(function(r){return"%".concat(r.charCodeAt(0).toString(16).toUpperCase())}))}},RIqP:function(r,t,e){var n=e("Ijbi"),o=e("EbDI"),a=e("ZhPi"),i=e("Bnag");r.exports=function(r){return n(r)||o(r)||a(r)||i()}},WkPL:function(r,t){r.exports=function(r,t){(null==t||t>r.length)&&(t=r.length);for(var e=0,n=new Array(t);e<t;e++)n[e]=r[e];return n}},ZhPi:function(r,t,e){var n=e("WkPL");r.exports=function(r,t){if(r){if("string"==typeof r)return n(r,t);var e=Object.prototype.toString.call(r).slice(8,-1);return"Object"===e&&r.constructor&&(e=r.constructor.name),"Map"===e||"Set"===e?Array.from(r):"Arguments"===e||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(e)?n(r,t):void 0}}},c0go:function(r,t,e){"use strict";r.exports=function(r,t){for(var e={},n=Object.keys(r),o=Array.isArray(t),a=0;a<n.length;a++){var 
i=n[a],c=r[i];(o?-1!==t.indexOf(i):t(i,c,r))&&(e[i]=c)}return e}},"cr+I":function(r,t,e){"use strict";var n=e("J4zp"),o=e("RIqP");function a(r,t){var e;if("undefined"==typeof Symbol||null==r[Symbol.iterator]){if(Array.isArray(r)||(e=function(r,t){if(!r)return;if("string"==typeof r)return i(r,t);var e=Object.prototype.toString.call(r).slice(8,-1);"Object"===e&&r.constructor&&(e=r.constructor.name);if("Map"===e||"Set"===e)return Array.from(r);if("Arguments"===e||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(e))return i(r,t)}(r))||t&&r&&"number"==typeof r.length){e&&(r=e);var n=0,o=function(){};return{s:o,n:function(){return n>=r.length?{done:!0}:{done:!1,value:r[n++]}},e:function(r){throw r},f:o}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var a,c=!0,u=!1;return{s:function(){e=r[Symbol.iterator]()},n:function(){var r=e.next();return c=r.done,r},e:function(r){u=!0,a=r},f:function(){try{c||null==e.return||e.return()}finally{if(u)throw a}}}}function i(r,t){(null==t||t>r.length)&&(t=r.length);for(var e=0,n=new Array(t);e<t;e++)n[e]=r[e];return n}var c=e("Pmem"),u=e("8jRI"),s=e("8yz6"),l=e("c0go");function f(r){if("string"!=typeof r||1!==r.length)throw new TypeError("arrayFormatSeparator must be single character string")}function p(r,t){return t.encode?t.strict?c(r):encodeURIComponent(r):r}function y(r,t){return t.decode?u(r):r}function m(r){var t=r.indexOf("#");return-1!==t&&(r=r.slice(0,t)),r}function d(r){var t=(r=m(r)).indexOf("?");return-1===t?"":r.slice(t+1)}function v(r,t){return t.parseNumbers&&!Number.isNaN(Number(r))&&"string"==typeof r&&""!==r.trim()?r=Number(r):!t.parseBooleans||null===r||"true"!==r.toLowerCase()&&"false"!==r.toLowerCase()||(r="true"===r.toLowerCase()),r}function b(r,t){f((t=Object.assign({decode:!0,sort:!0,arrayFormat:"none",arrayFormatSeparator:",",parseNumbers:!1,parseBooleans:!1},t)).arrayFormatSeparator);var e=function(r){var t;switch(r.arrayFormat){case"index":return function(r,e,n){t=/\[(\d*)\]$/.exec(r),r=r.replace(/\[\d*\]$/,""),t?(void 0===n[r]&&(n[r]={}),n[r][t[1]]=e):n[r]=e};case"bracket":return function(r,e,n){t=/(\[\])$/.exec(r),r=r.replace(/\[\]$/,""),t?void 0!==n[r]?n[r]=[].concat(n[r],e):n[r]=[e]:n[r]=e};case"comma":case"separator":return function(t,e,n){var o="string"==typeof e&&e.includes(r.arrayFormatSeparator),a="string"==typeof e&&!o&&y(e,r).includes(r.arrayFormatSeparator);e=a?y(e,r):e;var i=o||a?e.split(r.arrayFormatSeparator).map((function(t){return y(t,r)})):null===e?e:y(e,r);n[t]=i};case"bracket-separator":return function(t,e,n){var o=/(\[\])$/.test(t);if(t=t.replace(/\[\]$/,""),o){var a=null===e?[]:e.split(r.arrayFormatSeparator).map((function(t){return y(t,r)}));void 0!==n[t]?n[t]=[].concat(n[t],a):n[t]=a}else n[t]=e?y(e,r):e};default:return function(r,t,e){void 0!==e[r]?e[r]=[].concat(e[r],t):e[r]=t}}}(t),o=Object.create(null);if("string"!=typeof r)return o;if(!(r=r.trim().replace(/^[?#&]/,"")))return o;var i,c=a(r.split("&"));try{for(c.s();!(i=c.n()).done;){var u=i.value;if(""!==u){var l=s(t.decode?u.replace(/\+/g," "):u,"="),p=n(l,2),m=p[0],d=p[1];d=void 0===d?null:["comma","separator","bracket-separator"].includes(t.arrayFormat)?d:y(d,t),e(y(m,t),d,o)}}}catch(I){c.e(I)}finally{c.f()}for(var b=0,g=Object.keys(o);b<g.length;b++){var h=g[b],j=o[h];if("object"==typeof j&&null!==j)for(var w=0,k=Object.keys(j);w<k.length;w++){var x=k[w];j[x]=v(j[x],t)}else 
o[h]=v(j,t)}return!1===t.sort?o:(!0===t.sort?Object.keys(o).sort():Object.keys(o).sort(t.sort)).reduce((function(r,t){var e=o[t];return Boolean(e)&&"object"==typeof e&&!Array.isArray(e)?r[t]=function r(t){return Array.isArray(t)?t.sort():"object"==typeof t?r(Object.keys(t)).sort((function(r,t){return Number(r)-Number(t)})).map((function(r){return t[r]})):t}(e):r[t]=e,r}),Object.create(null))}t.extract=d,t.parse=b,t.stringify=function(r,t){if(!r)return"";f((t=Object.assign({encode:!0,strict:!0,arrayFormat:"none",arrayFormatSeparator:","},t)).arrayFormatSeparator);for(var e=function(e){return t.skipNull&&null==r[e]||t.skipEmptyString&&""===r[e]},n=function(r){switch(r.arrayFormat){case"index":return function(t){return function(e,n){var a=e.length;return void 0===n||r.skipNull&&null===n||r.skipEmptyString&&""===n?e:[].concat(o(e),null===n?[[p(t,r),"[",a,"]"].join("")]:[[p(t,r),"[",p(a,r),"]=",p(n,r)].join("")])}};case"bracket":return function(t){return function(e,n){return void 0===n||r.skipNull&&null===n||r.skipEmptyString&&""===n?e:[].concat(o(e),null===n?[[p(t,r),"[]"].join("")]:[[p(t,r),"[]=",p(n,r)].join("")])}};case"comma":case"separator":case"bracket-separator":var t="bracket-separator"===r.arrayFormat?"[]=":"=";return function(e){return function(n,o){return void 0===o||r.skipNull&&null===o||r.skipEmptyString&&""===o?n:(o=null===o?"":o,0===n.length?[[p(e,r),t,p(o,r)].join("")]:[[n,p(o,r)].join(r.arrayFormatSeparator)])}};default:return function(t){return function(e,n){return void 0===n||r.skipNull&&null===n||r.skipEmptyString&&""===n?e:[].concat(o(e),null===n?[p(t,r)]:[[p(t,r),"=",p(n,r)].join("")])}}}}(t),a={},i=0,c=Object.keys(r);i<c.length;i++){var u=c[i];e(u)||(a[u]=r[u])}var s=Object.keys(a);return!1!==t.sort&&s.sort(t.sort),s.map((function(e){var o=r[e];return void 0===o?"":null===o?p(e,t):Array.isArray(o)?0===o.length&&"bracket-separator"===t.arrayFormat?p(e,t)+"[]":o.reduce(n(e),[]).join("&"):p(e,t)+"="+p(o,t)})).filter((function(r){return r.length>0})).join("&")},t.parseUrl=function(r,t){t=Object.assign({decode:!0},t);var e=s(r,"#"),o=n(e,2),a=o[0],i=o[1];return Object.assign({url:a.split("?")[0]||"",query:b(d(r),t)},t&&t.parseFragmentIdentifier&&i?{fragmentIdentifier:y(i,t)}:{})},t.stringifyUrl=function(r,e){e=Object.assign({encode:!0,strict:!0},e);var n=m(r.url).split("?")[0]||"",o=t.extract(r.url),a=t.parse(o,{sort:!1}),i=Object.assign(a,r.query),c=t.stringify(i,e);c&&(c="?".concat(c));var u=function(r){var t="",e=r.indexOf("#");return-1!==e&&(t=r.slice(e)),t}(r.url);return r.fragmentIdentifier&&(u="#".concat(p(r.fragmentIdentifier,e))),"".concat(n).concat(c).concat(u)},t.pick=function(r,e,n){n=Object.assign({parseFragmentIdentifier:!0},n);var o=t.parseUrl(r,n),a=o.url,i=o.query,c=o.fragmentIdentifier;return t.stringifyUrl({url:a,query:l(i,e),fragmentIdentifier:c},n)},t.exclude=function(r,e,n){var o=Array.isArray(e)?function(r){return!e.includes(r)}:function(r,t){return!e(r,t)};return t.pick(r,o,n)}},m0LI:function(r,t){r.exports=function(r,t){if("undefined"!=typeof Symbol&&Symbol.iterator in Object(r)){var e=[],n=!0,o=!1,a=void 0;try{for(var i,c=r[Symbol.iterator]();!(n=(i=c.next()).done)&&(e.push(i.value),!t||e.length!==t);n=!0);}catch(u){o=!0,a=u}finally{try{n||null==c.return||c.return()}finally{if(o)throw a}}return e}}},wTVA:function(r,t){r.exports=function(r){if(Array.isArray(r))return r}},wkBT:function(r,t){r.exports=function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a 
[Symbol.iterator]() method.")}}}]);
//# sourceMappingURL=component---src-pages-publication-js-4b56b26048feb051b985.js.map |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from .. import models
class JobsOperations(object):
"""JobsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The Version of the API to be used with the client request. Constant value: "2018-06-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-06-01-preview"
self.config = config
def list(
self, resource_group_name, account_name, transform_name, filter=None, top=None, skip=None, custom_headers=None, raw=False, **operation_config):
"""List Jobs.
Lists all of the Jobs for the Transform.
:param resource_group_name: The name of the resource group within the
Azure subscription.
:type resource_group_name: str
:param account_name: The Media Services account name.
:type account_name: str
:param transform_name: The Transform name.
:type transform_name: str
:param filter: Restricts the set of items returned.
:type filter: str
:param top: Specifies a non-negative integer n that limits the number
of items returned from a collection. The service returns the number of
available items up to but not greater than the specified value n.
:type top: int
:param skip: Specifies a non-negative integer n that excludes the
first n items of the queried collection from the result. The service
returns items starting at position n+1.
:type skip: int
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Job
:rtype:
~azure.mgmt.media.models.JobPaged[~azure.mgmt.media.models.Job]
:raises:
:class:`ApiErrorException<azure.mgmt.media.models.ApiErrorException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
'transformName': self._serialize.url("transform_name", transform_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ApiErrorException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.JobPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.JobPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs'}
def get(
self, resource_group_name, account_name, transform_name, job_name, custom_headers=None, raw=False, **operation_config):
"""Get Job.
Gets a Job.
:param resource_group_name: The name of the resource group within the
Azure subscription.
:type resource_group_name: str
:param account_name: The Media Services account name.
:type account_name: str
:param transform_name: The Transform name.
:type transform_name: str
:param job_name: The Job name.
:type job_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Job or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.media.models.Job or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ApiErrorException<azure.mgmt.media.models.ApiErrorException>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
'transformName': self._serialize.url("transform_name", transform_name, 'str'),
'jobName': self._serialize.url("job_name", job_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 404]:
raise models.ApiErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Job', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}'}
def create(
self, resource_group_name, account_name, transform_name, job_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Create Job.
Creates a Job.
:param resource_group_name: The name of the resource group within the
Azure subscription.
:type resource_group_name: str
:param account_name: The Media Services account name.
:type account_name: str
:param transform_name: The Transform name.
:type transform_name: str
:param job_name: The Job name.
:type job_name: str
:param parameters: The request parameters
:type parameters: ~azure.mgmt.media.models.Job
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Job or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.media.models.Job or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ApiErrorException<azure.mgmt.media.models.ApiErrorException>`
"""
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
'transformName': self._serialize.url("transform_name", transform_name, 'str'),
'jobName': self._serialize.url("job_name", job_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'Job')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [201]:
raise models.ApiErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('Job', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}'}
def delete(
self, resource_group_name, account_name, transform_name, job_name, custom_headers=None, raw=False, **operation_config):
"""Delete Job.
Deletes a Job.
:param resource_group_name: The name of the resource group within the
Azure subscription.
:type resource_group_name: str
:param account_name: The Media Services account name.
:type account_name: str
:param transform_name: The Transform name.
:type transform_name: str
:param job_name: The Job name.
:type job_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ApiErrorException<azure.mgmt.media.models.ApiErrorException>`
"""
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
'transformName': self._serialize.url("transform_name", transform_name, 'str'),
'jobName': self._serialize.url("job_name", job_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 204]:
raise models.ApiErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}'}
def cancel_job(
self, resource_group_name, account_name, transform_name, job_name, custom_headers=None, raw=False, **operation_config):
"""Cancel Job.
Cancel a Job.
:param resource_group_name: The name of the resource group within the
Azure subscription.
:type resource_group_name: str
:param account_name: The Media Services account name.
:type account_name: str
:param transform_name: The Transform name.
:type transform_name: str
:param job_name: The Job name.
:type job_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ApiErrorException<azure.mgmt.media.models.ApiErrorException>`
"""
# Construct URL
url = self.cancel_job.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'accountName': self._serialize.url("account_name", account_name, 'str'),
'transformName': self._serialize.url("transform_name", transform_name, 'str'),
'jobName': self._serialize.url("job_name", job_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ApiErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
cancel_job.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Media/mediaServices/{accountName}/transforms/{transformName}/jobs/{jobName}/cancelJob'}
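# Rough usage sketch, assuming an already-authenticated media services client that
# exposes these operations as `client.jobs`; all resource names are illustrative:
#   for job in client.jobs.list("myResourceGroup", "myMediaAccount", "myTransform"):
#       print(job.name, job.state)
#   client.jobs.cancel_job("myResourceGroup", "myMediaAccount", "myTransform", "job1")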
|
import firebase from 'firebase/app';
import 'firebase/firestore';
export default function initDb() {
const config = {
apiKey: process.env.API_KEY,
authDomain: process.env.AUTH_DOMAIN,
databaseURL: process.env.DATABASE_URL,
projectId: process.env.PROJECT_ID,
};
if (!firebase.apps.length) {
firebase.initializeApp(config);
} else {
firebase.app();
}
return firebase;
}
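// Hypothetical usage (the bundler must inject the process.env values above):
//   const firebase = initDb();
//   const db = firebase.firestore();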
|
from model import common
import torch.nn as nn
import torch
url = {
'r16f64x2': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x2-1bc95232.pt',
'r16f64x3': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x3-abf2a44e.pt',
'r16f64x4': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_baseline_x4-6b446fab.pt',
'r32f256x2': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x2-0edfb8a3.pt',
'r32f256x3': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x3-ea3ef2c6.pt',
'r32f256x4': 'https://cv.snu.ac.kr/research/EDSR/models/edsr_x4-4f62e9ef.pt'
}
def make_model(args, parent=False):
return EDSR(args)
class EDSR(nn.Module):
def __init__(self, args, conv=common.default_conv):
super(EDSR, self).__init__()
n_resblocks = args.n_resblocks
n_feats = args.n_feats
kernel_size = 3
scale = args.scale[0]
act = nn.ReLU(True)
# self.url = url['r{}f{}x{}'.format(n_resblocks, n_feats, scale)]
self.sub_mean = common.MeanShift(args.rgb_range)
self.add_mean = common.MeanShift(args.rgb_range, sign=1)
# define head module
m_head = [conv(args.n_colors, n_feats, kernel_size)]
# define body module
self.m_body0 = common.ResBlock(
conv, n_feats, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample0 = conv(n_feats, n_feats//4, 1)
self.m_body1 = common.ResBlock(
conv, n_feats//4, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample1 = conv(n_feats//4, n_feats, 1)
# self.tail1 = conv(n_feats//4, n_feats//16, kernel_size)
self.m_body2 = common.ResBlock(
conv, n_feats, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample2 = conv(n_feats, n_feats//4, 1)
self.m_body3 = common.ResBlock(
conv, n_feats//4, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample3 = conv(n_feats//4, n_feats, 1)
# self.tail3 = conv(n_feats//4, n_feats//16, kernel_size)
self.m_body4 = common.ResBlock(
conv, n_feats, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample4 = conv(n_feats, n_feats//4, 1)
self.m_body5 = common.ResBlock(
conv, n_feats//4, kernel_size, act=act, res_scale=args.res_scale)
self.m_downsample5 = conv(n_feats//4, n_feats, 1)
# self.tail5 = conv(n_feats//4, n_feats//16, kernel_size)
m_body = [conv(n_feats, n_feats, kernel_size)]
# define tail module
m_tail = [
# common.Upsampler(conv, scale, n_feats, act=False),
conv(n_feats, args.n_colors, kernel_size)
]
# self.refine = conv(n_feats//4, args.n_colors, kernel_size)
self.head = nn.Sequential(*m_head)
self.body = nn.Sequential(*m_body)
self.tail = nn.Sequential(*m_tail)
def forward(self, x):
x = self.sub_mean(x)
x = self.head(x)
body0 = self.m_body0(x)
m_downsample0 = self.m_downsample0(body0)
body1 = self.m_body1(m_downsample0)
m_downsample1 = self.m_downsample1(body1)
# m_tail1 = self.tail1(body1)
body2 = self.m_body2(m_downsample1+body0)
m_downsample2 = self.m_downsample2(body2)
body3 = self.m_body3(m_downsample2+m_downsample0)
m_downsample3 = self.m_downsample3(body3)
# m_tail3 = self.tail1(body3)
body4 = self.m_body4(m_downsample3+m_downsample1)
m_downsample4 = self.m_downsample4(body4)
body5 = self.m_body5(m_downsample4+m_downsample2+m_downsample0)
m_downsample5 = self.m_downsample5(body5)
# m_tail5 = self.tail1(body5)
res = self.body(m_downsample5+m_downsample3+m_downsample1)
res += x
y = self.tail(res)
# y = torch.cat([x, m_tail1, m_tail3, m_tail5], 1)
# y = self.refine(x)
y = self.add_mean(y)
return y
def load_state_dict(self, state_dict, strict=True):
own_state = self.state_dict()
for name, param in state_dict.items():
if name in own_state:
if isinstance(param, nn.Parameter):
param = param.data
try:
own_state[name].copy_(param)
except Exception:
if name.find('tail') == -1:
raise RuntimeError('While copying the parameter named {}, '
'whose dimensions in the model are {} and '
'whose dimensions in the checkpoint are {}.'
.format(name, own_state[name].size(), param.size()))
elif strict:
if name.find('tail') == -1:
raise KeyError('unexpected key "{}" in state_dict'
.format(name))
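# Note: copy errors and unexpected keys are tolerated above only for parameters whose
# name contains 'tail', so a pretrained EDSR checkpoint with a different upsampling
# tail can still initialize the body of this modified architecture.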
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_RENDERER_DOM_STORAGE_LOCAL_STORAGE_NAMESPACE_H_
#define CONTENT_RENDERER_DOM_STORAGE_LOCAL_STORAGE_NAMESPACE_H_
#include "base/macros.h"
#include "third_party/blink/public/platform/web_security_origin.h"
#include "third_party/blink/public/platform/web_storage_namespace.h"
namespace content {
class LocalStorageCachedAreas;
class LocalStorageNamespace : public blink::WebStorageNamespace {
public:
// |local_storage_cached_areas| is guaranteed to outlive this object.
explicit LocalStorageNamespace(
LocalStorageCachedAreas* local_storage_cached_areas);
~LocalStorageNamespace() override;
// blink::WebStorageNamespace:
blink::WebStorageArea* CreateStorageArea(
const blink::WebSecurityOrigin& origin) override;
bool IsSameNamespace(const WebStorageNamespace&) const override;
private:
LocalStorageCachedAreas* const local_storage_cached_areas_;
DISALLOW_COPY_AND_ASSIGN(LocalStorageNamespace);
};
} // namespace content
#endif // CONTENT_RENDERER_DOM_STORAGE_LOCAL_STORAGE_NAMESPACE_H_
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Script to plot the loss as a function of epoch (Reproduces Figure 2 of the article)
"""
import tensorflow as tf
import argparse
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rcParams.update({'font.size': 7})
import fnmatch
import os
import numpy as np
from vrmslearn.ModelParameters import ModelParameters
from vrmslearn.SeismicGenerator import SeismicGenerator
from vrmslearn.Tester import Tester
from vrmslearn.RCNN import RCNN
from vrmslearn.ModelGenerator import interval_velocity_time
import h5py as h5
def get_test_error(dirlog, savepath, dataset_path):
"""
Compute the error on a test set
@params:
dirlog (str): Directory containing the trained model
savepath (str): Directory in which to save the predictions
dataset_path (str): Directory of the test set
@returns:
vrms_rmse (float): RMSE for Vrms
vint_rmse (float): RMSE for Vint
true_pos (float): Primary reflection identification: ratio of true positives
true_neg (float): Primary reflection identification: ratio of true negatives
false_pos (float): Primary reflection identification: ratio of false positives
false_neg (float): Primary reflection identification: ratio of false negatives
"""
if os.path.isfile(savepath + ".hdf5"):
file = h5.File(savepath + ".hdf5")
vint_rmse = file["vint_rmse"].value
vrms_rmse = file["vrms_rmse"].value
true_pos = file["true_pos"].value
true_neg = file["true_neg"].value
false_pos = file["false_pos"].value
false_neg = file["false_neg"].value
file.close()
else:
pars = ModelParameters()
pars.read_parameters_from_disk(dataset_path+"/model_parameters.hdf5")
seismic_gen = SeismicGenerator(model_parameters=pars)
nn = RCNN(input_size=seismic_gen.image_size,
batch_size=100)
tester = Tester(NN=nn, data_generator=seismic_gen)
toeval = [nn.output_ref, nn.output_vrms, nn.output_vint]
toeval_names = ["ref", "vrms", "vint"]
vint_rmse_all = 0
vrms_rmse_all = 0
true_pos_all = 0
true_neg_all = 0
false_pos_all = 0
false_neg_all = 0
tester.test_dataset(savepath=savepath,
toeval=toeval,
toeval_names=toeval_names,
restore_from=dirlog,
testpath=dataset_path)
vp, vint_pred, masks, lfiles, pfiles = tester.get_preds(labelname="vp",
predname="vint",
maskname="valid",
savepath=savepath,
testpath=dataset_path)
vrms, vrms_pred, _, _ , _ = tester.get_preds(labelname="vrms",
predname="vrms",
savepath=savepath,
testpath=dataset_path)
ref, ref_pred, _, _ , _ = tester.get_preds(labelname="tlabels",
predname="ref",
savepath=savepath,
testpath=dataset_path)
vint = [None for _ in range(len(vp))]
for ii in range(len(vint)):
vint[ii] = interval_velocity_time(vp[ii], pars=pars)
vint[ii] = vint[ii][::pars.resampling]
vint_pred[ii] = vint_pred[ii]*(pars.vp_max - pars.vp_min) + pars.vp_min
vrms_pred[ii] = vrms_pred[ii] * (pars.vp_max - pars.vp_min) + pars.vp_min
vrms[ii] = vrms[ii] * (pars.vp_max - pars.vp_min) + pars.vp_min
ref_pred[ii] = np.argmax(ref_pred[ii], axis=1)
ind0 = np.nonzero(ref[ii])[0][0]
masks[ii][0:ind0] = 0
vint = np.array(vint)
vint_pred = np.array(vint_pred)
vrms = np.array(vrms)
vrms_pred = np.array(vrms_pred)
ref = np.array(ref)
ref_pred = np.array(ref_pred)
masks = np.array(masks)
nsamples = np.sum(masks == 1)
vint_rmse = np.sqrt(np.sum(masks * (vint - vint_pred)**2) / nsamples)
vrms_rmse = np.sqrt(np.sum(masks * (vrms - vrms_pred) ** 2) / nsamples)
nsamples = ref.flatten().shape[0]
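# With labels and predictions in {0, 1}: ref - ref_pred == 0 means agreement,
# -1 means a spurious pick (false positive) and +1 means a missed primary
# reflection (false negative).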
true_pos = np.sum(((ref - ref_pred) == 0) * (ref == 1)) / nsamples
true_neg = np.sum(((ref - ref_pred) == 0) * (ref == 0)) / nsamples
false_pos = np.sum((ref - ref_pred) == -1) / nsamples
false_neg = np.sum((ref - ref_pred) == 1) / nsamples
file = h5.File(savepath + ".hdf5")
file["vint_rmse"] = vint_rmse
file["vrms_rmse"] = vrms_rmse
file["true_pos"] = true_pos
file["true_neg"] = true_neg
file["false_pos"] = false_pos
file["false_neg"] = false_neg
file.close()
return vrms_rmse, vint_rmse, true_pos, true_neg, false_pos, false_neg
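# Example of evaluating a single checkpoint outside the plotting loop below
# (hypothetical paths; the checkpoint name and directories are placeholders,
# not files shipped with this script):
#   errors = get_test_error("Case_article0/model.ckpt-50000",
#                           "Case_article0/model.ckpt-50000_test/dataset",
#                           "dataset_article/test/dhmin5layer_num_min10")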
if __name__ == "__main__":
# Initialize argument parser
parser = argparse.ArgumentParser()
# Add arguments to parse for training
parser.add_argument(
"--logdir",
type=str,
default="Case_article0",
help="name of the directory to save logs : str"
)
parser.add_argument(
"--dataset_path",
type=str,
default="dataset_article/test/dhmin5layer_num_min10",
help="path of the test dataset"
)
# Parse the input for training parameters
args, unparsed = parser.parse_known_args()
training_size = 40000
batch_size = 40
savefile = "Paper/Fig/Case4_loss"
# Obtain all subdirectories containing tensorflow models inside args.logdir.
dirs = []
dir_models = {}
for dir1 in os.listdir(args.logdir):
path1 = os.path.join(args.logdir, dir1)
if os.path.isdir(path1):
files = []
for dir2 in os.listdir(path1):
path2 = os.path.join(path1, dir2)
if os.path.isfile(path2):
files.append(path2)
efiles = fnmatch.filter(files, os.path.join(path1,"events.*"))
efiles.sort()
dirs.append(efiles)
allmodels = fnmatch.filter(files, os.path.join(path1,"model.ckpt-*.meta"))
allmodels.sort()
dir_models[dirs[-1][-1]] = [a[:-5] for a in allmodels]
for dir in dirs:
print(dir)
# Create the figure
fig, ax = plt.subplots(3, 1, figsize=[8 / 2.54, 12 / 2.54])
step0 = 0
plots = [[] for _ in range(3)]
labels = ["Phase 0", "Phase 1", "Phase 2"]
for ii, dir in enumerate(dirs[:-2]):
step = []
loss = []
# Get Loss for each stage of training and each iteration
for e in dir:
for summary in tf.train.summary_iterator(e):
for v in summary.summary.value:
if v.tag == 'Loss_Function/loss':
loss.append(v.simple_value)
step.append(summary.step + step0)
inds = np.argsort(step)
step = np.array(step)[inds][1:]
loss = np.array(loss)[inds][1:]
plots[ii], = ax[0].semilogy(step * batch_size /training_size, loss, basey=2)
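# For every phase after the first, remember the last test-error point of the
# previous phase so the RMSE curves can be stitched together across stages.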
if ii!=0:
steprms0 = steprms[-1]
vrms0 = vrms[-1]
vint0 = vint[-1]
# Compute test set error for each model during training (or retrieve it)
steprms = []
vrms = []
vint = []
for dirlog in dir_models[dir[-1]]:
savepath = dirlog + "_test/" + args.dataset_path
if not os.path.isdir(savepath):
os.makedirs(savepath)
vrms_rmse, vint_rmse, _, _, _, _ = get_test_error(dirlog, savepath, args.dataset_path)
steprms.append(int(dirlog.split("-")[-1]) + step0)
vrms.append(vrms_rmse)
vint.append(vint_rmse)
inds = np.argsort(steprms)
steprms = np.array(steprms)[inds][1:]
vrms = np.array(vrms)[inds][1:]
vint = np.array(vint)[inds][1:]
if ii!=0:
steprms = np.insert(steprms, 0, steprms0)
vrms = np.insert(vrms, 0, vrms0)
vint = np.insert(vint, 0, vint0)
ax[1].plot(steprms * batch_size /training_size, vrms)
ax[2].plot(steprms * batch_size /training_size, vint)
step0 = step[-1]
# Figure presentation
ax[0].set_xlabel("Epoch")
ax[0].set_ylabel("Loss")
ax[1].set_xlabel("Epoch")
ax[1].set_ylabel("RMSE (m/s)")
ax[2].set_xlabel("Epoch")
ax[2].set_ylabel("RMSE (m/s)")
ax[0].legend(plots, labels,
loc='upper right',
bbox_to_anchor=(1.15, 1.35),
handlelength=0.4)
plt.tight_layout(rect=[0.001, 0, 0.9999, 1])
plt.savefig(savefile, dpi=600)
plt.savefig(savefile+"_lowres", dpi=100)
plt.show()
|
"""
This script creates a request.folder_id column which is a foreign
key to the library_folder table. It also adds 'type' and 'layout' columns
to the form_definition table.
"""
from __future__ import print_function
import logging
from sqlalchemy import (
Column,
ForeignKey,
Integer,
MetaData,
Table
)
from galaxy.model.custom_types import (
JSONType,
TrimmedString
)
from galaxy.model.migrate.versions.util import (
add_column,
)
log = logging.getLogger(__name__)
metadata = MetaData()
def upgrade(migrate_engine):
    print(__doc__)
    metadata.bind = migrate_engine
    metadata.reflect()
    # Create the folder_id column
    col = Column("folder_id", Integer, ForeignKey('library_folder.id'), index=True)
    add_column(col, 'request', metadata, index_name='ix_request_folder_id')
    # Create the type column in form_definition
    FormDefinition_table = Table("form_definition", metadata, autoload=True)
    col = Column("type", TrimmedString(255), index=True)
    add_column(col, FormDefinition_table, metadata, index_name='ix_form_definition_type')
    col = Column("layout", JSONType())
    add_column(col, FormDefinition_table, metadata)
def downgrade(migrate_engine):
    pass
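    # downgrade() is intentionally a no-op here. A symmetric downgrade would drop
    # the columns added above; sketch only, assuming a drop_column helper is
    # available alongside add_column in galaxy.model.migrate.versions.util:
    #   drop_column('folder_id', 'request', metadata)
    #   drop_column('type', 'form_definition', metadata)
    #   drop_column('layout', 'form_definition', metadata)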
|
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 2012 - 2016, Daniel Stenberg, <[email protected]>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
* RFC2195 CRAM-MD5 authentication
* RFC2617 Basic and Digest Access Authentication
* RFC2831 DIGEST-MD5 authentication
* RFC4422 Simple Authentication and Security Layer (SASL)
* RFC4616 PLAIN authentication
* RFC6749 OAuth 2.0 Authorization Framework
* RFC7628 A Set of SASL Mechanisms for OAuth
* Draft LOGIN SASL Mechanism <draft-murchison-sasl-login-00.txt>
*
***************************************************************************/
#include "curl_setup.h"
#include <curl/curl.h>
#include "urldata.h"
#include "curl_base64.h"
#include "curl_md5.h"
#include "vauth/vauth.h"
#include "vtls/vtls.h"
#include "curl_hmac.h"
#include "curl_sasl.h"
#include "warnless.h"
#include "strtok.h"
#include "strequal.h"
#include "rawstr.h"
#include "sendf.h"
#include "non-ascii.h" /* included for Curl_convert_... prototypes */
/* The last 3 #include files should be in this order */
#include "curl_printf.h"
#include "curl_memory.h"
#include "memdebug.h"
/* Supported mechanisms */
const struct {
const char *name; /* Name */
size_t len; /* Name length */
unsigned int bit; /* Flag bit */
} mechtable[] = {
{ "LOGIN", 5, SASL_MECH_LOGIN },
{ "PLAIN", 5, SASL_MECH_PLAIN },
{ "CRAM-MD5", 8, SASL_MECH_CRAM_MD5 },
{ "DIGEST-MD5", 10, SASL_MECH_DIGEST_MD5 },
{ "GSSAPI", 6, SASL_MECH_GSSAPI },
{ "EXTERNAL", 8, SASL_MECH_EXTERNAL },
{ "NTLM", 4, SASL_MECH_NTLM },
{ "XOAUTH2", 7, SASL_MECH_XOAUTH2 },
{ "OAUTHBEARER", 11, SASL_MECH_OAUTHBEARER },
{ ZERO_NULL, 0, 0 }
};
/*
* Curl_sasl_cleanup()
*
* This is used to cleanup any libraries or curl modules used by the sasl
* functions.
*
* Parameters:
*
* conn [in] - The connection data.
* authused [in] - The authentication mechanism used.
*/
void Curl_sasl_cleanup(struct connectdata *conn, unsigned int authused)
{
#if defined(USE_KERBEROS5)
/* Cleanup the gssapi structure */
if(authused == SASL_MECH_GSSAPI) {
Curl_auth_gssapi_cleanup(&conn->krb5);
}
#endif
#if defined(USE_NTLM)
/* Cleanup the NTLM structure */
if(authused == SASL_MECH_NTLM) {
Curl_auth_ntlm_cleanup(&conn->ntlm);
}
#endif
#if !defined(USE_KERBEROS5) && !defined(USE_NTLM)
/* Reserved for future use */
(void)conn;
(void)authused;
#endif
}
/*
* Curl_sasl_decode_mech()
*
* Convert a SASL mechanism name into a token.
*
* Parameters:
*
* ptr [in] - The mechanism string.
* maxlen [in] - Maximum mechanism string length.
* len [out] - If not NULL, effective name length.
*
* Returns the SASL mechanism token or 0 if no match.
*/
unsigned int Curl_sasl_decode_mech(const char *ptr, size_t maxlen, size_t *len)
{
unsigned int i;
char c;
for(i = 0; mechtable[i].name; i++) {
if(maxlen >= mechtable[i].len &&
!memcmp(ptr, mechtable[i].name, mechtable[i].len)) {
if(len)
*len = mechtable[i].len;
if(maxlen == mechtable[i].len)
return mechtable[i].bit;
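/* The name only matched a prefix of the input; accept it solely when the
   following character cannot extend a longer mechanism token */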
c = ptr[mechtable[i].len];
if(!ISUPPER(c) && !ISDIGIT(c) && c != '-' && c != '_')
return mechtable[i].bit;
}
}
return 0;
}
/*
* Curl_sasl_parse_url_auth_option()
*
* Parse the URL login options.
*/
CURLcode Curl_sasl_parse_url_auth_option(struct SASL *sasl,
const char *value, size_t len)
{
CURLcode result = CURLE_OK;
unsigned int mechbit;
size_t mechlen;
if(!len)
return CURLE_URL_MALFORMAT;
if(sasl->resetprefs) {
sasl->resetprefs = FALSE;
sasl->prefmech = SASL_AUTH_NONE;
}
if(strnequal(value, "*", len))
sasl->prefmech = SASL_AUTH_DEFAULT;
else {
mechbit = Curl_sasl_decode_mech(value, len, &mechlen);
if(mechbit && mechlen == len)
sasl->prefmech |= mechbit;
else
result = CURLE_URL_MALFORMAT;
}
return result;
}
/*
* Curl_sasl_init()
*
* Initializes the SASL structure.
*/
void Curl_sasl_init(struct SASL *sasl, const struct SASLproto *params)
{
sasl->params = params; /* Set protocol dependent parameters */
sasl->state = SASL_STOP; /* Not yet running */
sasl->authmechs = SASL_AUTH_NONE; /* No known authentication mechanism yet */
sasl->prefmech = SASL_AUTH_DEFAULT; /* Prefer all mechanisms */
sasl->authused = SASL_AUTH_NONE; /* No authentication mechanism used yet */
sasl->resetprefs = TRUE; /* Reset prefmech upon AUTH parsing. */
sasl->mutual_auth = FALSE; /* No mutual authentication (GSSAPI only) */
sasl->force_ir = FALSE; /* Respect external option */
}
/*
* state()
*
* This is the ONLY way to change SASL state!
*/
static void state(struct SASL *sasl, struct connectdata *conn,
saslstate newstate)
{
#if defined(DEBUGBUILD) && !defined(CURL_DISABLE_VERBOSE_STRINGS)
/* for debug purposes */
static const char * const names[]={
"STOP",
"PLAIN",
"LOGIN",
"LOGIN_PASSWD",
"EXTERNAL",
"CRAMMD5",
"DIGESTMD5",
"DIGESTMD5_RESP",
"NTLM",
"NTLM_TYPE2MSG",
"GSSAPI",
"GSSAPI_TOKEN",
"GSSAPI_NO_DATA",
"OAUTH2",
"OAUTH2_RESP",
"CANCEL",
"FINAL",
/* LAST */
};
if(sasl->state != newstate)
infof(conn->data, "SASL %p state change from %s to %s\n",
(void *)sasl, names[sasl->state], names[newstate]);
#else
(void) conn;
#endif
sasl->state = newstate;
}
/*
* Curl_sasl_can_authenticate()
*
* Check if we have enough auth data and capabilities to authenticate.
*/
bool Curl_sasl_can_authenticate(struct SASL *sasl, struct connectdata *conn)
{
/* Have credentials been provided? */
if(conn->bits.user_passwd)
return TRUE;
/* EXTERNAL can authenticate without a user name and/or password */
if(sasl->authmechs & sasl->prefmech & SASL_MECH_EXTERNAL)
return TRUE;
return FALSE;
}
/*
* Curl_sasl_start()
*
* Calculate the required login details for SASL authentication.
*/
CURLcode Curl_sasl_start(struct SASL *sasl, struct connectdata *conn,
bool force_ir, saslprogress *progress)
{
CURLcode result = CURLE_OK;
struct Curl_easy *data = conn->data;
unsigned int enabledmechs;
const char *mech = NULL;
char *resp = NULL;
size_t len = 0;
saslstate state1 = SASL_STOP;
saslstate state2 = SASL_FINAL;
#if defined(USE_KERBEROS5)
const char *service = data->set.str[STRING_SERVICE_NAME] ?
data->set.str[STRING_SERVICE_NAME] :
sasl->params->service;
#endif
sasl->force_ir = force_ir; /* Latch for future use */
sasl->authused = 0; /* No mechanism used yet */
enabledmechs = sasl->authmechs & sasl->prefmech;
*progress = SASL_IDLE;
/* Calculate the supported authentication mechanism, by decreasing order of
security, as well as the initial response where appropriate */
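/* EXTERNAL is only attempted when no password was supplied, since it relies on
   credentials established outside SASL (for example a TLS client certificate) */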
if((enabledmechs & SASL_MECH_EXTERNAL) && !conn->passwd[0]) {
mech = SASL_MECH_STRING_EXTERNAL;
state1 = SASL_EXTERNAL;
sasl->authused = SASL_MECH_EXTERNAL;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_external_message(data, conn->user, &resp,
&len);
}
else if(conn->bits.user_passwd) {
#if defined(USE_KERBEROS5)
if((enabledmechs & SASL_MECH_GSSAPI) && Curl_auth_is_gssapi_supported() &&
Curl_auth_user_contains_domain(conn->user)) {
sasl->mutual_auth = FALSE; /* TODO: Calculate mutual authentication */
mech = SASL_MECH_STRING_GSSAPI;
state1 = SASL_GSSAPI;
state2 = SASL_GSSAPI_TOKEN;
sasl->authused = SASL_MECH_GSSAPI;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_gssapi_user_message(data, conn->user,
conn->passwd,
service,
data->easy_conn->
host.name,
sasl->mutual_auth,
NULL, &conn->krb5,
&resp, &len);
}
else
#endif
#ifndef CURL_DISABLE_CRYPTO_AUTH
if((enabledmechs & SASL_MECH_DIGEST_MD5) &&
Curl_auth_is_digest_supported()) {
mech = SASL_MECH_STRING_DIGEST_MD5;
state1 = SASL_DIGESTMD5;
sasl->authused = SASL_MECH_DIGEST_MD5;
}
else if(enabledmechs & SASL_MECH_CRAM_MD5) {
mech = SASL_MECH_STRING_CRAM_MD5;
state1 = SASL_CRAMMD5;
sasl->authused = SASL_MECH_CRAM_MD5;
}
else
#endif
#ifdef USE_NTLM
if((enabledmechs & SASL_MECH_NTLM) && Curl_auth_is_ntlm_supported()) {
mech = SASL_MECH_STRING_NTLM;
state1 = SASL_NTLM;
state2 = SASL_NTLM_TYPE2MSG;
sasl->authused = SASL_MECH_NTLM;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_ntlm_type1_message(conn->user, conn->passwd,
&conn->ntlm, &resp, &len);
}
else
#endif
if((enabledmechs & SASL_MECH_OAUTHBEARER) && conn->oauth_bearer) {
mech = SASL_MECH_STRING_OAUTHBEARER;
state1 = SASL_OAUTH2;
state2 = SASL_OAUTH2_RESP;
sasl->authused = SASL_MECH_OAUTHBEARER;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_oauth_bearer_message(data, conn->user,
conn->host.name,
conn->port,
conn->oauth_bearer,
&resp, &len);
}
else if((enabledmechs & SASL_MECH_XOAUTH2) && conn->oauth_bearer) {
mech = SASL_MECH_STRING_XOAUTH2;
state1 = SASL_OAUTH2;
sasl->authused = SASL_MECH_XOAUTH2;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_oauth_bearer_message(data, conn->user,
NULL, 0,
conn->oauth_bearer,
&resp, &len);
}
else if(enabledmechs & SASL_MECH_LOGIN) {
mech = SASL_MECH_STRING_LOGIN;
state1 = SASL_LOGIN;
state2 = SASL_LOGIN_PASSWD;
sasl->authused = SASL_MECH_LOGIN;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_login_message(data, conn->user, &resp, &len);
}
else if(enabledmechs & SASL_MECH_PLAIN) {
mech = SASL_MECH_STRING_PLAIN;
state1 = SASL_PLAIN;
sasl->authused = SASL_MECH_PLAIN;
if(force_ir || data->set.sasl_ir)
result = Curl_auth_create_plain_message(data, conn->user, conn->passwd,
&resp, &len);
}
}
if(!result && mech) {
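/* If the initial response would push the AUTH command past the protocol's
   maximum initial-response length, drop it and fall back to a normal
   challenge/response exchange */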
if(resp && sasl->params->maxirlen &&
strlen(mech) + len > sasl->params->maxirlen) {
free(resp);
resp = NULL;
}
result = sasl->params->sendauth(conn, mech, resp);
if(!result) {
*progress = SASL_INPROGRESS;
state(sasl, conn, resp ? state2 : state1);
}
}
free(resp);
return result;
}
/*
* Curl_sasl_continue()
*
* Continue the authentication.
*/
CURLcode Curl_sasl_continue(struct SASL *sasl, struct connectdata *conn,
int code, saslprogress *progress)
{
CURLcode result = CURLE_OK;
struct Curl_easy *data = conn->data;
saslstate newstate = SASL_FINAL;
char *resp = NULL;
#if !defined(CURL_DISABLE_CRYPTO_AUTH)
char *serverdata;
char *chlg = NULL;
size_t chlglen = 0;
#endif
#if !defined(CURL_DISABLE_CRYPTO_AUTH) || defined(USE_KERBEROS5)
const char *service = data->set.str[STRING_SERVICE_NAME] ?
data->set.str[STRING_SERVICE_NAME] :
sasl->params->service;
#endif
size_t len = 0;
*progress = SASL_INPROGRESS;
if(sasl->state == SASL_FINAL) {
if(code != sasl->params->finalcode)
result = CURLE_LOGIN_DENIED;
*progress = SASL_DONE;
state(sasl, conn, SASL_STOP);
return result;
}
if(sasl->state != SASL_CANCEL && sasl->state != SASL_OAUTH2_RESP &&
code != sasl->params->contcode) {
*progress = SASL_DONE;
state(sasl, conn, SASL_STOP);
return CURLE_LOGIN_DENIED;
}
switch(sasl->state) {
case SASL_STOP:
*progress = SASL_DONE;
return result;
case SASL_PLAIN:
result = Curl_auth_create_plain_message(data, conn->user, conn->passwd,
&resp,
&len);
break;
case SASL_LOGIN:
result = Curl_auth_create_login_message(data, conn->user, &resp, &len);
newstate = SASL_LOGIN_PASSWD;
break;
case SASL_LOGIN_PASSWD:
result = Curl_auth_create_login_message(data, conn->passwd, &resp, &len);
break;
case SASL_EXTERNAL:
result = Curl_auth_create_external_message(data, conn->user, &resp, &len);
break;
#ifndef CURL_DISABLE_CRYPTO_AUTH
case SASL_CRAMMD5:
sasl->params->getmessage(data->state.buffer, &serverdata);
result = Curl_auth_decode_cram_md5_message(serverdata, &chlg, &chlglen);
if(!result)
result = Curl_auth_create_cram_md5_message(data, chlg, conn->user,
conn->passwd, &resp, &len);
free(chlg);
break;
case SASL_DIGESTMD5:
sasl->params->getmessage(data->state.buffer, &serverdata);
result = Curl_auth_create_digest_md5_message(data, serverdata,
conn->user, conn->passwd,
service,
&resp, &len);
newstate = SASL_DIGESTMD5_RESP;
break;
case SASL_DIGESTMD5_RESP:
resp = strdup("");
if(!resp)
result = CURLE_OUT_OF_MEMORY;
break;
#endif
#ifdef USE_NTLM
case SASL_NTLM:
/* Create the type-1 message */
result = Curl_auth_create_ntlm_type1_message(conn->user, conn->passwd,
&conn->ntlm, &resp, &len);
newstate = SASL_NTLM_TYPE2MSG;
break;
case SASL_NTLM_TYPE2MSG:
/* Decode the type-2 message */
sasl->params->getmessage(data->state.buffer, &serverdata);
result = Curl_auth_decode_ntlm_type2_message(data, serverdata,
&conn->ntlm);
if(!result)
result = Curl_auth_create_ntlm_type3_message(data, conn->user,
conn->passwd, &conn->ntlm,
&resp, &len);
break;
#endif
#if defined(USE_KERBEROS5)
case SASL_GSSAPI:
result = Curl_auth_create_gssapi_user_message(data, conn->user,
conn->passwd,
service,
data->easy_conn->host.name,
sasl->mutual_auth, NULL,
&conn->krb5,
&resp, &len);
newstate = SASL_GSSAPI_TOKEN;
break;
case SASL_GSSAPI_TOKEN:
sasl->params->getmessage(data->state.buffer, &serverdata);
if(sasl->mutual_auth) {
/* Decode the user token challenge and create the optional response
message */
result = Curl_auth_create_gssapi_user_message(data, NULL, NULL,
NULL, NULL,
sasl->mutual_auth,
serverdata, &conn->krb5,
&resp, &len);
newstate = SASL_GSSAPI_NO_DATA;
}
else
/* Decode the security challenge and create the response message */
result = Curl_auth_create_gssapi_security_message(data, serverdata,
&conn->krb5,
&resp, &len);
break;
case SASL_GSSAPI_NO_DATA:
sasl->params->getmessage(data->state.buffer, &serverdata);
/* Decode the security challenge and create the response message */
result = Curl_auth_create_gssapi_security_message(data, serverdata,
&conn->krb5,
&resp, &len);
break;
#endif
case SASL_OAUTH2:
/* Create the authorisation message */
if(sasl->authused == SASL_MECH_OAUTHBEARER) {
result = Curl_auth_create_oauth_bearer_message(data, conn->user,
conn->host.name,
conn->port,
conn->oauth_bearer,
&resp, &len);
/* Failures may be sent by the server as continuations for OAUTHBEARER */
newstate = SASL_OAUTH2_RESP;
}
else
result = Curl_auth_create_oauth_bearer_message(data, conn->user,
NULL, 0,
conn->oauth_bearer,
&resp, &len);
break;
case SASL_OAUTH2_RESP:
/* The continuation is optional so check the response code */
if(code == sasl->params->finalcode) {
/* Final response was received so we are done */
*progress = SASL_DONE;
state(sasl, conn, SASL_STOP);
return result;
}
else if(code == sasl->params->contcode) {
/* Acknowledge the continuation by sending a 0x01 response base64
encoded */
resp = strdup("AQ==");
if(!resp)
result = CURLE_OUT_OF_MEMORY;
break;
}
else {
*progress = SASL_DONE;
state(sasl, conn, SASL_STOP);
return CURLE_LOGIN_DENIED;
}
case SASL_CANCEL:
/* Remove the offending mechanism from the supported list */
sasl->authmechs ^= sasl->authused;
/* Start an alternative SASL authentication */
result = Curl_sasl_start(sasl, conn, sasl->force_ir, progress);
newstate = sasl->state; /* Use state from Curl_sasl_start() */
break;
default:
failf(data, "Unsupported SASL authentication mechanism");
result = CURLE_UNSUPPORTED_PROTOCOL; /* Should not happen */
break;
}
switch(result) {
case CURLE_BAD_CONTENT_ENCODING:
/* Cancel dialog */
result = sasl->params->sendcont(conn, "*");
newstate = SASL_CANCEL;
break;
case CURLE_OK:
if(resp)
result = sasl->params->sendcont(conn, resp);
break;
default:
newstate = SASL_STOP; /* Stop on error */
*progress = SASL_DONE;
break;
}
free(resp);
state(sasl, conn, newstate);
return result;
}
|
from django.contrib import admin
from models import IncomingSMS, OutgoingSMS, IncomingMMS, MMSFile
class IncomingSMSAdmin(admin.ModelAdmin):
    list_display = ['sender', 'received_at']
class OutgoingSMSAdmin(admin.ModelAdmin):
    list_display = ['recipient', 'message', 'sent_at', 'sent', 'delivery_status']
class IncomingMMSAdmin(admin.ModelAdmin):
    list_display = ['sender', 'subject', 'received_at']
admin.site.register(IncomingSMS, IncomingSMSAdmin)
admin.site.register(OutgoingSMS, OutgoingSMSAdmin)
admin.site.register(IncomingMMS, IncomingMMSAdmin)
admin.site.register(MMSFile)
|