rem
stringlengths 0
322k
| add
stringlengths 0
2.05M
| context
stringlengths 8
228k
|
---|---|---|
opt['label'] = a.pop('configname')
|
opt['label'] = opt('configName')
|
def getParam(options=[], doc='', details='', noDialog=False, checkUnprocessedArgs=True, verbose=False, nCol=1): """ get parameter from either - useTk ... - command line argument - configuration file specified by -f file, or - prompt for user input parameter: verbose: whether or not print detailed info checkUnprocessedArgs: check args, avoid misspelling of arg name options: a list of dictionaries with key arg: command line argument, conformable to that of python getopt module. For example, "d:" means -d name longarg: command line argument, --arg. For exmaple "mu=". c.f. getopt. label: config name in a config file default: default value if user hit enter for prompt. Default value can not be none allowedTypes: an array of allowed types. Default to string. if type is not string, input will be evaluated and resulting type will be checked. jump: go to option 'jump' if current option is True. This is useful for -h (goto -1 (end)) or conditional options where you need only part of the options. goto can not go backwards. jumpIfFalse: go to option 'jumpIfFalse' if current option is False. This function will first check command line argument. If the argument is available, use its value. Otherwise check if a config file is specified. If so, get the value from the config file. If both failed, prompt user to input a value. All input will be checked against types, if exists, an array of allowed types. """ # check if --noDialog, -h is present # or there is no 'label' in the options structure # for backward compatibility, change 'configName' to 'label' for opt in options: if opt.has_key('configName'): print 'Warning: configName is obsolete, please use "label" instead' opt['label'] = a.pop('configname') useDefault = '--useDefault' in sys.argv[1:] if noDialog or '--noDialog' in sys.argv[1:] or '-h' in sys.argv[1:] or '--help' in sys.argv[1:] \ or True not in map(lambda x:x.has_key('label'), options): return termGetParam(options, doc, verbose, useDefault) else: if useTkinter: return tkGetParam(options, sys.argv[0], doc, details, checkUnprocessedArgs, nCol) elif useWxPython: return wxGetParam(options, sys.argv[0], doc, details, checkUnprocessedArgs, nCol) else: return termGetParam(options, doc, verbose, useDefault)
|
geno = [] if type(atPloidy) == type(1): ploidy = [atPloidy] elif len(atPloidy) > 0: ploidy = atPloidy else: ploidy = range(0, pop.ploidy()) if len(atLoci) > 0: loci = atLoci else: loci = range(pop.totNumLoci()) gs = pop.genoSize() tl = pop.totNumLoci() if len(indRange) > 0: if type(indRange[0]) not in [type([]), type(())]: indRange = [indRange] arr = pop.arrGenotype() for r in indRange: for i in range(r[0], r[1]): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl + loc] ) elif len(subPop) > 0: for sp in subPop: arr = pop.arrGenotype(sp) for i in range(pop.subPopSize(sp)): for p in ploidy: for loc in loci: geno.append(arr[ gs*i + p*tl +loc]) else: arr = pop.arrGenotype() if len(ploidy) == 0 and len(atLoci) == 0: geno = pop.arrGenotype()
|
geno = [] if type(atPloidy) == type(1): ploidy = [atPloidy] elif len(atPloidy) > 0: ploidy = atPloidy
|
def getGenotype(pop, atLoci=[], subPop=[], indRange=[], atPloidy=[]): '''Obtain genotype as specified by parameters atLoci: subset of loci, default to all subPop: subset of subpopulations, default ao all indRange: individual ranges ''' geno = [] if type(atPloidy) == type(1): ploidy = [atPloidy] elif len(atPloidy) > 0: ploidy = atPloidy else: ploidy = range(0, pop.ploidy()) if len(atLoci) > 0: loci = atLoci else: loci = range(pop.totNumLoci()) gs = pop.genoSize() tl = pop.totNumLoci() if len(indRange) > 0: if type(indRange[0]) not in [type([]), type(())]: indRange = [indRange] arr = pop.arrGenotype() for r in indRange: for i in range(r[0], r[1]): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl + loc] ) elif len(subPop) > 0: for sp in subPop: arr = pop.arrGenotype(sp) for i in range(pop.subPopSize(sp)): for p in ploidy: for loc in loci: geno.append(arr[ gs*i + p*tl +loc]) else: arr = pop.arrGenotype() if len(ploidy) == 0 and len(atLoci) == 0: geno = pop.arrGenotype() else: for i in range(pop.popSize()): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl +loc] ) return geno
|
for i in range(pop.popSize()): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl +loc] ) return geno
|
ploidy = range(0, pop.ploidy()) if len(atLoci) > 0: loci = atLoci else: loci = range(pop.totNumLoci()) gs = pop.genoSize() tl = pop.totNumLoci() if len(indRange) > 0: if type(indRange[0]) not in [type([]), type(())]: indRange = [indRange] arr = pop.arrGenotype() for r in indRange: for i in range(r[0], r[1]): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl + loc] ) elif len(subPop) > 0: for sp in subPop: arr = pop.arrGenotype(sp) for i in range(pop.subPopSize(sp)): for p in ploidy: for loc in loci: geno.append(arr[ gs*i + p*tl +loc]) else: arr = pop.arrGenotype() if len(ploidy) == 0 and len(atLoci) == 0: geno = pop.arrGenotype() else: for i in range(pop.popSize()): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl +loc] ) return geno
|
def getGenotype(pop, atLoci=[], subPop=[], indRange=[], atPloidy=[]): '''Obtain genotype as specified by parameters atLoci: subset of loci, default to all subPop: subset of subpopulations, default ao all indRange: individual ranges ''' geno = [] if type(atPloidy) == type(1): ploidy = [atPloidy] elif len(atPloidy) > 0: ploidy = atPloidy else: ploidy = range(0, pop.ploidy()) if len(atLoci) > 0: loci = atLoci else: loci = range(pop.totNumLoci()) gs = pop.genoSize() tl = pop.totNumLoci() if len(indRange) > 0: if type(indRange[0]) not in [type([]), type(())]: indRange = [indRange] arr = pop.arrGenotype() for r in indRange: for i in range(r[0], r[1]): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl + loc] ) elif len(subPop) > 0: for sp in subPop: arr = pop.arrGenotype(sp) for i in range(pop.subPopSize(sp)): for p in ploidy: for loc in loci: geno.append(arr[ gs*i + p*tl +loc]) else: arr = pop.arrGenotype() if len(ploidy) == 0 and len(atLoci) == 0: geno = pop.arrGenotype() else: for i in range(pop.popSize()): for p in ploidy: for loc in loci: geno.append( arr[ gs*i + p*tl +loc] ) return geno
|
if type(var) == type( dw({}) ): var = var.__dict__ if level < 0 or (level > 0 and curLevel < level): if type(var) == types.ListType or type(var) == types.TupleType: index = 0 for x in var: if type(x) != types.ListType and type(x) != types.DictType: if x != None: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\t', x else: print ' '*indent, '('+str(index)+')\t', x elif type(x) == types.ListType or type(x) == types.DictType: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\n', else: print ' '*indent, '('+str(index)+')\n', _listVars(x, level, name, False, indent+2, curLevel + 1) index += 1 elif type(var) == types.DictType: for x in var.items(): if not type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\t', x[1] for x in var.items(): if x[0] != 'subPop' and type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\n', _listVars(x[1], level, name, False, indent+2, curLevel + 1) if subPop == True and var.has_key('subPop'): print ' '*indent, 'subPop\n', _listVars(var['subPop'], level, name, False, indent+2, curLevel + 1)
|
if type(var) == type( dw({}) ): var = var.__dict__ if level < 0 or (level > 0 and curLevel < level): if type(var) == types.ListType or type(var) == types.TupleType: index = 0 for x in var: if type(x) != types.ListType and type(x) != types.DictType: if x != None: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\t', x else: print ' '*indent, '('+str(index)+')\t', x elif type(x) == types.ListType or type(x) == types.DictType: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\n', else: print ' '*indent, '('+str(index)+')\n', _listVars(x, level, name, False, indent+2, curLevel + 1) index += 1 elif type(var) == types.DictType: for x in var.items(): if not type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\t', x[1] for x in var.items(): if x[0] != 'subPop' and type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\n', _listVars(x[1], level, name, False, indent+2, curLevel + 1) if subPop == True and var.has_key('subPop'): print ' '*indent, 'subPop\n', _listVars(var['subPop'], level, name, False, indent+2, curLevel + 1) else: print ' '*indent, var else: if type(var) == types.ListType or type(var) == types.TupleType: print ' '*indent, 'list of length', len(var) elif type(var) == types.DictType: print ' '*indent, 'dict with keys [', for num in range(0,len(var.keys())): if type(var.keys()[num]) == types.StringType: print "'"+ var.keys()[num] + "',", else: print var.keys()[num], ",", if num != len(var.keys())-1 and num%4 == 3: print '\n' + ' '*(indent+5), print ']' else: print ' '*indent, var def ListVars(var, level=-1, name='', subPop=True, useWxPython=True): ''' list a variable in tree format, either in text format or in a wxPython window. var: any variable to be viewed. Can be a dw object returned by dvars() function level: level of display. name: only view certain variable subPop: whether or not display info in subPop useWxPython: if True, use terminal output even if wxPython is available. ''' if not useWxPython: _listVars(var, level, name, subPop, 0, 0) return try: import wx, wx.py.filling as fill except: _listVars(var, level, name, subPop, 0, 0) return app = wx.App() wx.InitAllImageHandlers() if var==None: fillFrame = fill.FillingFrame()
|
def _listVars(var, level=-1, name='', subPop=True, indent=0, curLevel=0): ''' called by listVars. Will list variables recursively''' if type(var) == type( dw({}) ): var = var.__dict__ # all level or level < specified maximum level if level < 0 or (level > 0 and curLevel < level): # list is list or typle type if type(var) == types.ListType or type(var) == types.TupleType: index = 0 for x in var: # literals if type(x) != types.ListType and type(x) != types.DictType: # this will save a huge amount of output for sparse matrix # generated by Stat(LD=[]) etc. if x != None: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\t', x else: print ' '*indent, '('+str(index)+')\t', x # nested stuff elif type(x) == types.ListType or type(x) == types.DictType: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\n', else: print ' '*indent, '('+str(index)+')\n', _listVars(x, level, name, False, indent+2, curLevel + 1) index += 1 elif type(var) == types.DictType: # none array first for x in var.items(): if not type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\t', x[1] # array but not subPop for x in var.items(): if x[0] != 'subPop' and type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\n', _listVars(x[1], level, name, False, indent+2, curLevel + 1) # subPop if subPop == True and var.has_key('subPop'): print ' '*indent, 'subPop\n', _listVars(var['subPop'], level, name, False, indent+2, curLevel + 1) else: print ' '*indent, var else: # out of the range of level if type(var) == types.ListType or type(var) == types.TupleType: print ' '*indent, 'list of length', len(var) elif type(var) == types.DictType: print ' '*indent, 'dict with keys [', for num in range(0,len(var.keys())): if type(var.keys()[num]) == types.StringType: print "'"+ var.keys()[num] + "',", else: print var.keys()[num], ",", if num != len(var.keys())-1 and num%4 == 3: print '\n' + ' '*(indent+5), print ']' else: print ' '*indent, var
|
print ' '*indent, var else: if type(var) == types.ListType or type(var) == types.TupleType: print ' '*indent, 'list of length', len(var) elif type(var) == types.DictType: print ' '*indent, 'dict with keys [', for num in range(0,len(var.keys())): if type(var.keys()[num]) == types.StringType: print "'"+ var.keys()[num] + "',", else: print var.keys()[num], ",", if num != len(var.keys())-1 and num%4 == 3: print '\n' + ' '*(indent+5), print ']' else: print ' '*indent, var def ListVars(var, level=-1, name='', subPop=True, useWxPython=True): ''' list a variable in tree format, either in text format or in a wxPython window. var: any variable to be viewed. Can be a dw object returned by dvars() function level: level of display. name: only view certain variable subPop: whether or not display info in subPop useWxPython: if True, use terminal output even if wxPython is available. ''' if not useWxPython: _listVars(var, level, name, subPop, 0, 0) return try: import wx, wx.py.filling as fill except: _listVars(var, level, name, subPop, 0, 0) return app = wx.App() wx.InitAllImageHandlers() if var==None: fillFrame = fill.FillingFrame() else: if type(var) == type( dw({}) ): fillFrame = fill.FillingFrame(rootObject=var.__dict__, rootLabel='var') else: fillFrame = fill.FillingFrame(rootObject=var, rootLabel='var') fillFrame.Show(True) app.SetTopWindow(fillFrame) app.MainLoop()
|
if type(var) == type( dw({}) ): fillFrame = fill.FillingFrame(rootObject=var.__dict__, rootLabel='var') else: fillFrame = fill.FillingFrame(rootObject=var, rootLabel='var') fillFrame.Show(True) app.SetTopWindow(fillFrame) app.MainLoop()
|
def _listVars(var, level=-1, name='', subPop=True, indent=0, curLevel=0): ''' called by listVars. Will list variables recursively''' if type(var) == type( dw({}) ): var = var.__dict__ # all level or level < specified maximum level if level < 0 or (level > 0 and curLevel < level): # list is list or typle type if type(var) == types.ListType or type(var) == types.TupleType: index = 0 for x in var: # literals if type(x) != types.ListType and type(x) != types.DictType: # this will save a huge amount of output for sparse matrix # generated by Stat(LD=[]) etc. if x != None: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\t', x else: print ' '*indent, '('+str(index)+')\t', x # nested stuff elif type(x) == types.ListType or type(x) == types.DictType: if type(var) == types.ListType: print ' '*indent, '['+str(index)+']\n', else: print ' '*indent, '('+str(index)+')\n', _listVars(x, level, name, False, indent+2, curLevel + 1) index += 1 elif type(var) == types.DictType: # none array first for x in var.items(): if not type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\t', x[1] # array but not subPop for x in var.items(): if x[0] != 'subPop' and type(x[1]) in [types.ListType, types.DictType, types.TupleType]: if name == '' or x[0] == name: print ' '*indent, x[0], ':\n', _listVars(x[1], level, name, False, indent+2, curLevel + 1) # subPop if subPop == True and var.has_key('subPop'): print ' '*indent, 'subPop\n', _listVars(var['subPop'], level, name, False, indent+2, curLevel + 1) else: print ' '*indent, var else: # out of the range of level if type(var) == types.ListType or type(var) == types.TupleType: print ' '*indent, 'list of length', len(var) elif type(var) == types.DictType: print ' '*indent, 'dict with keys [', for num in range(0,len(var.keys())): if type(var.keys()[num]) == types.StringType: print "'"+ var.keys()[num] + "',", else: print var.keys()[num], ",", if num != len(var.keys())-1 and num%4 == 3: print '\n' + ' '*(indent+5), print ']' else: print ' '*indent, var
|
""" collect variables so that plotters can plot them all at once You can of course put it in other uses Usage: a = dataAggregator( maxRecord=0, recordSize=0) maxRecord: if more data is pushed, the old ones are discarded recordSize: size of record a.push(gen, data, idx=-1) gen: generation number data: one record (will set recordSize if the first time), or idx: if idx!=-1, set data at idx. a.clear() a.range() a.data[i] a.gen a.ready()
|
def __init__(self, maxRecord=0, recordSize=0): """ maxRecord: maxRecorddow size. I.e., maximum generations of data to keep """ self.gen = [] self.data = [] self.maxRecord = maxRecord self.recordSize = recordSize
|
def endl(output=">", outputExpr="", **kwargs): parm = '' for (k,v) in kwargs.items(): parm += ' , ' + str(k) + '=' + str(v) cmd = r'''pyEval( r'"\n"' ''' + ', output="""' + output + \ '""", outputExpr="""' + outputExpr + '"""' + parm + ')' # print cmd return eval(cmd)
|
Internal data storage: self.gen [ .... ] self.data column1 [ ...... ] column2 [ ...... ] ....... each record is pushed at the end of """ def __init__(self, maxRecord=0, recordSize=0): """ maxRecord: maxRecorddow size. I.e., maximum generations of data to keep """ self.gen = [] self.data = [] self.maxRecord = maxRecord self.recordSize = recordSize def __repr__(self): s = str(self.gen) + "\n" for i in range(0, len(self.data)): s += str(self.data[i]) + "\n" return s def clear(self): self.gen = [] self.data = [] def ready(self): return self.recordSize>0 and len(gen)>0 and len( data[0] ) == len( data[-1] ) def flatData(self): res = [] for d in self.data: res.extend( d ) return res def dataRange(self): if len(self.gen) == 0: return [0,0] y0 = min( [ min(x) for x in self.data] ) y1 = max( [ max(x) for x in self.data] ) return [y0,y1] def push(self, _gen, _data, _idx=-1 ): if len(self.gen) == 0: self.gen = [ _gen ] if _idx == -1: if self.recordSize == 0: self.recordSize = len(_data) elif self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data.append( [_data[i]] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data = [ [_data] ] return else: raise exceptions.ValueError("Appending data with wrong idx") elif len(self.gen) == 1: if self.gen[-1] == _gen: if _idx == -1: raise exceptions.ValueError("Can not reassign data from this generation") elif self.recordSize != 0 and _idx >= self.recordSize: raise exceptions.ValueError("Data exceeding specified record size") elif _idx == len(self.data): if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data.append( [_data] ) elif _idx < len(self.data): raise exceptions.ValueError("You can not change exisiting data") else: raise exceptions.ValueError("Appending data with wrong idx") else: if self.recordSize == 0: self.recordSize = len(self.data) elif self.recordSize != len(self.data): raise exceptions.ValueError("The first row is imcomplete") self.gen.append( _gen ) if _idx == -1: if self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data[i].append( _data[i] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[0].append(_data) return else: raise exceptions.ValueError("Appending data with wrong idx") else: if self.maxRecord > 0 : if _gen - self.gen[0] >= self.maxRecord: self.gen = self.gen[1:] for i in range(0, self.recordSize): self.data[i] = self.data[i][1:] if self.gen[-1] == _gen: if _idx == -1: raise exceptions.ValueError("Can not reassign data from this generation") elif _idx >= self.recordSize: raise exceptions.ValueError("Data exceeding specified record size") elif _idx < len(self.data): if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[_idx].append( _data ) else: raise exceptions.ValueError("Appending data with wrong idx") else: self.gen.append( _gen ) if _idx == -1: if self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data[i].append( _data[i] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[0].append(_data) return else: raise exceptions.ValueError("Appending data with wrong idx")
|
def __repr__(self): s = str(self.gen) + "\n" for i in range(0, len(self.data)): s += str(self.data[i]) + "\n" return s def clear(self): self.gen = [] self.data = [] def ready(self): return self.recordSize>0 and len(gen)>0 and len( data[0] ) == len( data[-1] ) def flatData(self): res = [] for d in self.data: res.extend( d ) return res def dataRange(self): if len(self.gen) == 0: return [0,0] y0 = min( [ min(x) for x in self.data] ) y1 = max( [ max(x) for x in self.data] ) return [y0,y1] def push(self, _gen, _data, _idx=-1 ): if len(self.gen) == 0: self.gen = [ _gen ] if _idx == -1: if self.recordSize == 0: self.recordSize = len(_data) elif self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data.append( [_data[i]] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data = [ [_data] ] return else: raise exceptions.ValueError("Appending data with wrong idx") elif len(self.gen) == 1: if self.gen[-1] == _gen: if _idx == -1: raise exceptions.ValueError("Can not reassign data from this generation") elif self.recordSize != 0 and _idx >= self.recordSize: raise exceptions.ValueError("Data exceeding specified record size") elif _idx == len(self.data): if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data.append( [_data] ) elif _idx < len(self.data): raise exceptions.ValueError("You can not change exisiting data") else: raise exceptions.ValueError("Appending data with wrong idx") else: if self.recordSize == 0: self.recordSize = len(self.data) elif self.recordSize != len(self.data): raise exceptions.ValueError("The first row is imcomplete") self.gen.append( _gen ) if _idx == -1: if self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data[i].append( _data[i] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[0].append(_data) return else: raise exceptions.ValueError("Appending data with wrong idx") else: if self.maxRecord > 0 : if _gen - self.gen[0] >= self.maxRecord: self.gen = self.gen[1:] for i in range(0, self.recordSize): self.data[i] = self.data[i][1:] if self.gen[-1] == _gen: if _idx == -1: raise exceptions.ValueError("Can not reassign data from this generation") elif _idx >= self.recordSize: raise exceptions.ValueError("Data exceeding specified record size") elif _idx < len(self.data): if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[_idx].append( _data ) else: raise exceptions.ValueError("Appending data with wrong idx") else: self.gen.append( _gen ) if _idx == -1: if self.recordSize != len(_data): raise exceptions.ValueError("Data length does not equal specfied record size") for i in range(self.recordSize): self.data[i].append( _data[i] ) return elif _idx == 0: if type(_data) in [type(()), type([])]: raise exceptions.ValueError("If idx is specified, _data should not be a list.") self.data[0].append(_data) return else: raise exceptions.ValueError("Appending data with wrong idx")
|
def endl(output=">", outputExpr="", **kwargs): parm = '' for (k,v) in kwargs.items(): parm += ' , ' + str(k) + '=' + str(v) cmd = r'''pyEval( r'"\n"' ''' + ', output="""' + output + \ '""", outputExpr="""' + outputExpr + '"""' + parm + ')' # print cmd return eval(cmd)
|
recombination=0.00001, penetrance=[0,0.25,0.5], exclude=[], pre=True, daf=0.001): """ save population in Linkage format. Currently only support affected sibpairs sampled with affectedSibpairSample operator. pop: population to be saved. Must have ancestralDepth 1. paired individuals are sibs. Parental population are corresponding parents. If pop is a filename, it will be loaded. chrom: Which chromosome is saved.
|
recombination=0.00001, penetrance=[0,0.25,0.5], exclude=[], pre=True, daf=0.001): """ save population in Linkage format. Currently only support affected sibpairs sampled with affectedSibpairSample operator. pop: population to be saved. Must have ancestralDepth 1. paired individuals are sibs. Parental population are corresponding parents. If pop is a filename, it will be loaded. chrom: Which chromosome is saved. popType: population type. Can be 'sibpair' or 'bySubPop'. If type is sibpair, pairs of individuals will be considered as sibpairs. If type is bySubPop, individuals in a subpopulation is considered as siblings. output: output.dat and output.ped will be the data and pedigree file. You may need to rename them to be analyzed by LINKAGE. This allows saving multiple files. outputExpr: expression version of output. exclude: exclude some loci pre: True. pedigree format to be fed to makeped Note: the first child is always the proband. """ if type(pop) == type(''): pop = LoadPopulation(pop) if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" try: dataFile = open(file + ".dat", "w") if pre: pedFile = open(file + ".pre", "w") else: pedFile = open(file + ".ped", "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file + ".dat/.ped to write." markers = [pop.chromBegin(chrom)+m for m in range(pop.numLoci(chrom))] for e in exclude: markers.remove(e) dataFile.write( '''%d 0 0 5 << nlocus, risklocus, sexlink, nprogram 0 0 0 0 << mutsys, mutmale, mutfemale, disequil ''' % (len(markers)+1) ) dataFile.write( ' '.join( [str(m+1) for m in range(len(markers))]) + "\n") dataFile.write( "1 2 << affection status code, number of alleles\n") dataFile.write( "%f %f << gene frequency\n" % ( 1-daf, daf) ) dataFile.write( "1 << number of factors\n") dataFile.write( "%f %f %f << penetrance\n" % tuple(penetrance) ) if alleleFreq == []: Stat(pop, alleleFreq=markers) af = pop.dvars().alleleFreq else: af = alleleFreq for marker in markers: numAllele = len(af[marker]) dataFile.write( '3 %d << Marker%d_%d \n' % (numAllele, chrom, pop.chromLocusPair(marker)[1]) ) dataFile.write( ''.join(['%.6f ' % af[marker][ale] for ale in range(numAllele)]) + ' << gene frequencies\n' ) dataFile.write('0 0 << sex difference, interference\n') dataFile.write( ''.join(['%f '%recombination]*len(markers)) + ' << recombination rates \n ') dataFile.write( "1 0.1 0.1\n") dataFile.close() def sexCode(ind): if ind.sex() == Male: return 1 else: return 2 def affectedCode(ind): if ind.affected(): return 2 else: return 1 def genoStr(ind): string = '' for marker in markers: string += "%d %d " % (ind.allele(marker, 0)+1, ind.allele(marker, 1)+1) return string if popType == "sibpair": np = pop.popSize()/2 for ped in range(0, np): pop.useAncestralPop(1) par1 = pop.individual(2*ped) if pre: pedFile.write("%3d 1 0 0 %d %d " \ % (ped+1, sexCode(par1), affectedCode(par1))) else: pedFile.write("%3d 1 0 0 3 0 0 %d 0 %d " \ % (ped+1, sexCode(par1), affectedCode(par1))) pedFile.write( genoStr(par1) + '\n' ) par2 = pop.individual(2*ped+1) if pre: pedFile.write("%3d 2 0 0 %d %d " \ % (ped+1, sexCode(par2), affectedCode(par2))) else: pedFile.write("%3d 2 0 0 3 0 0 %d 0 %d " \ % (ped+1, sexCode(par2), affectedCode(par2))) pedFile.write( genoStr(par2) + '\n' ) if par1.sex() == Male: dadID = 1 momID = 2 else: dadID = 2 momID = 1 pop.useAncestralPop(0) off1 = pop.individual(2*ped) if pre: pedFile.write("%3d 3 %d %d %d %d " \ % (ped+1, dadID, momID, sexCode(off1), affectedCode(off1))) else: pedFile.write("%3d 3 %d %d 0 4 4 %d 1 %d " \ % (ped+1, dadID, momID, sexCode(off1), affectedCode(off1))) pedFile.write( genoStr(off1) + '\n' ) off2 = pop.individual(2*ped+1) if pre: pedFile.write("%3d 4 %d %d %d %d " \ % (ped+1, dadID, momID, sexCode(off2), affectedCode(off2))) else: pedFile.write("%3d 4 %d %d 0 0 0 %d 0 %d " \ % (ped+1, dadID, momID, sexCode(off2), affectedCode(off2))) pedFile.write( genoStr(off2) + '\n' ) elif popType == 'bySubPop': np = pop.numSubPop() offset = 0 for ped in range(0, np): if pop.subPopSize(ped) == 0: continue if ped == 0: offset = 1 pop.useAncestralPop(1) if pop.subPopSize(ped) > 2: raise exceptions.ValueError("Pedigree " + str(ped) + " has more than two parents.") famID = 1 if pop.subPopSize(ped) >= 1: par1 = pop.individual(0, ped) if pre: pedFile.write("%3d %d 0 0 %d %d " \ % (ped+offset, famID, sexCode(par1), affectedCode(par1))) else: pedFile.write("%3d %d 0 0 3 0 0 %d 0 %d " \ % (ped+offset, famID, sexCode(par1), affectedCode(par1))) pedFile.write( genoStr(par1) + '\n' ) famID += 1 if pop.subPopSize(ped) == 2: par2 = pop.individual(1,ped) par2sex = sexCode(par2) if sexCode(par1) == par2sex: print "Warning: same sex parents at pedigree " + str(ped) if sexCode(par1) == Male: par2sex = Female else: par2sex = Male if pre: pedFile.write("%3d %d 0 0 %d %d " \ % (ped+offset, famID, par2sex, affectedCode(par2))) else: pedFile.write("%3d %d 0 0 3 0 0 %d 0 %d " \ % (ped+offset, famID, par2sex, affectedCode(par2))) pedFile.write( genoStr(par2) + '\n' ) famID += 1 if famID == 1: dadID = 0 momID = 0 elif famID == 2: if par1.sex() == Male: dadID = 1 monID = 0 else: dadID = 0 monID = 1 else: if par1.sex() == Male: dadID = 1 momID = 2 else: dadID = 2 momID = 1 pop.useAncestralPop(0) for o in range(0, pop.subPopSize(ped)): off = pop.individual(o,ped) if pre: pedFile.write("%3d %d %d %d %d %d " \ % (ped+offset, famID, dadID, momID, sexCode(off), affectedCode(off))) else: pedFile.write("%3d %d %d %d 0 4 4 %d 1 %d " \ % (ped+offset, famID, dadID, momID, sexCode(off), affectedCode(off))) pedFile.write( genoStr(off) + '\n' ) famID += 1 else: raise exceptions.ValueError("Only popType 'sibpair' and 'bySubPop' are supported.") pedFile.close() def saveLinkage(output='', outputExpr='', **kwargs): "An operator to save population in linkage format" parm = '' for (k,v) in kwargs.items(): parm += str(k) + '=' + str(v) + ', ' opt = '''pyEval(exposePop=1, %s stmts=r\'\'\'SaveLinkage(pop, rep=rep, output=r"""%s""", outputExpr=r"""%s""" )\'\'\')''' % ( parm, output, outputExpr) return eval(opt) def SaveCSV(pop, output='', outputExpr='', exclude=[], **kwargs): """ save file in CSV format This format is used mostly for randTent method. Ihe format is:
|
def SaveLinkage(pop, chrom, popType='sibpair', output='', outputExpr='', alleleFreq=[], recombination=0.00001, penetrance=[0,0.25,0.5], exclude=[], pre=True, daf=0.001): """ save population in Linkage format. Currently only support affected sibpairs sampled with affectedSibpairSample operator. pop: population to be saved. Must have ancestralDepth 1. paired individuals are sibs. Parental population are corresponding parents. If pop is a filename, it will be loaded. chrom: Which chromosome is saved. popType: population type. Can be 'sibpair' or 'bySubPop'. If type is sibpair, pairs of individuals will be considered as sibpairs. If type is bySubPop, individuals in a subpopulation is considered as siblings. output: output.dat and output.ped will be the data and pedigree file. You may need to rename them to be analyzed by LINKAGE. This allows saving multiple files. outputExpr: expression version of output. exclude: exclude some loci pre: True. pedigree format to be fed to makeped Note: the first child is always the proband. """ if type(pop) == type(''): pop = LoadPopulation(pop) if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" # open data file and pedigree file to write. try: dataFile = open(file + ".dat", "w") if pre: pedFile = open(file + ".pre", "w") else: pedFile = open(file + ".ped", "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file + ".dat/.ped to write." # look at excluded loci, are they in this chrom? markers = [pop.chromBegin(chrom)+m for m in range(pop.numLoci(chrom))] for e in exclude: markers.remove(e) # # file is opened. # write data file # nlocus # another one is affection status # risklocus (not sure. risk is not to be calculated) # sexlink autosomal: 0 # nprogram whatever # mutsys: all loci are mutational? 0 right now # mutmale # mutfemale # disequil: assume in LD? Yes. dataFile.write( '''%d 0 0 5 << nlocus, risklocus, sexlink, nprogram
|
popType: population type. Can be 'sibpair' or 'bySubPop'. If type is sibpair, pairs of individuals will be considered as sibpairs. If type is bySubPop, individuals in a subpopulation is considered as siblings. output: output.dat and output.ped will be the data and pedigree file. You may need to rename them to be analyzed by LINKAGE. This allows saving multiple files. outputExpr: expression version of output. exclude: exclude some loci pre: True. pedigree format to be fed to makeped Note: the first child is always the proband. """ if type(pop) == type(''): pop = LoadPopulation(pop) if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" try: dataFile = open(file + ".dat", "w") if pre: pedFile = open(file + ".pre", "w")
|
chromsome famID,indID,sex,affectedness,allel1-1,allel1-2,allele2-1,allele2-2, ... ... chromosome """ if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() )
|
def SaveLinkage(pop, chrom, popType='sibpair', output='', outputExpr='', alleleFreq=[], recombination=0.00001, penetrance=[0,0.25,0.5], exclude=[], pre=True, daf=0.001): """ save population in Linkage format. Currently only support affected sibpairs sampled with affectedSibpairSample operator. pop: population to be saved. Must have ancestralDepth 1. paired individuals are sibs. Parental population are corresponding parents. If pop is a filename, it will be loaded. chrom: Which chromosome is saved. popType: population type. Can be 'sibpair' or 'bySubPop'. If type is sibpair, pairs of individuals will be considered as sibpairs. If type is bySubPop, individuals in a subpopulation is considered as siblings. output: output.dat and output.ped will be the data and pedigree file. You may need to rename them to be analyzed by LINKAGE. This allows saving multiple files. outputExpr: expression version of output. exclude: exclude some loci pre: True. pedigree format to be fed to makeped Note: the first child is always the proband. """ if type(pop) == type(''): pop = LoadPopulation(pop) if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" # open data file and pedigree file to write. try: dataFile = open(file + ".dat", "w") if pre: pedFile = open(file + ".pre", "w") else: pedFile = open(file + ".ped", "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file + ".dat/.ped to write." # look at excluded loci, are they in this chrom? markers = [pop.chromBegin(chrom)+m for m in range(pop.numLoci(chrom))] for e in exclude: markers.remove(e) # # file is opened. # write data file # nlocus # another one is affection status # risklocus (not sure. risk is not to be calculated) # sexlink autosomal: 0 # nprogram whatever # mutsys: all loci are mutational? 0 right now # mutmale # mutfemale # disequil: assume in LD? Yes. dataFile.write( '''%d 0 0 5 << nlocus, risklocus, sexlink, nprogram
|
pedFile = open(file + ".ped", "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file + ".dat/.ped to write." markers = [pop.chromBegin(chrom)+m for m in range(pop.numLoci(chrom))] for e in exclude: markers.remove(e) dataFile.write( '''%d 0 0 5 << nlocus, risklocus, sexlink, nprogram 0 0 0 0 << mutsys, mutmale, mutfemale, disequil ''' % (len(markers)+1) ) dataFile.write( ' '.join( [str(m+1) for m in range(len(markers))]) + "\n") dataFile.write( "1 2 << affection status code, number of alleles\n") dataFile.write( "%f %f << gene frequency\n" % ( 1-daf, daf) ) dataFile.write( "1 << number of factors\n") dataFile.write( "%f %f %f << penetrance\n" % tuple(penetrance) ) if alleleFreq == []: Stat(pop, alleleFreq=markers) af = pop.dvars().alleleFreq else: af = alleleFreq for marker in markers: numAllele = len(af[marker]) dataFile.write( '3 %d << Marker%d_%d \n' % (numAllele, chrom, pop.chromLocusPair(marker)[1]) ) dataFile.write( ''.join(['%.6f ' % af[marker][ale] for ale in range(numAllele)]) + ' << gene frequencies\n' ) dataFile.write('0 0 << sex difference, interference\n') dataFile.write( ''.join(['%f '%recombination]*len(markers)) + ' << recombination rates \n ') dataFile.write( "1 0.1 0.1\n") dataFile.close() def sexCode(ind): if ind.sex() == Male: return 1 else: return 2 def affectedCode(ind): if ind.affected(): return 2 else: return 1 def genoStr(ind): string = '' for marker in markers: string += "%d %d " % (ind.allele(marker, 0)+1, ind.allele(marker, 1)+1) return string if popType == "sibpair":
|
raise exceptions.ValueError, "Please specify output or outputExpr" markers = {} for ch in range(0,pop.numChrom()): markers[ch] = [] for m in range(0, pop.numLoci(ch)): if not pop.chromBegin(ch) + m in exclude: markers[ch].append(m) try: out = open( file, "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file +" to write." content = [''] * pop.numChrom() for i in range(0, pop.numChrom()): content[i] += 'Chromosome ' + str(i+1) + ',,,' for m in markers[i]: content[i] += ",locus%d_%d,%d" % (i+1, m+1, m+1) content[i] += "\n" def sexCode(ind): if ind.sex() == Male: return 1 else: return 2 def affectedCode(ind): if ind.affected(): return 1 else: return 2 def genoStr(ind, ch): string = '' for marker in markers[ch]: string += ",%d,%d" % (ind.allele(marker, 0, ch), ind.allele(marker, 1, ch)) return string
|
def SaveLinkage(pop, chrom, popType='sibpair', output='', outputExpr='', alleleFreq=[], recombination=0.00001, penetrance=[0,0.25,0.5], exclude=[], pre=True, daf=0.001): """ save population in Linkage format. Currently only support affected sibpairs sampled with affectedSibpairSample operator. pop: population to be saved. Must have ancestralDepth 1. paired individuals are sibs. Parental population are corresponding parents. If pop is a filename, it will be loaded. chrom: Which chromosome is saved. popType: population type. Can be 'sibpair' or 'bySubPop'. If type is sibpair, pairs of individuals will be considered as sibpairs. If type is bySubPop, individuals in a subpopulation is considered as siblings. output: output.dat and output.ped will be the data and pedigree file. You may need to rename them to be analyzed by LINKAGE. This allows saving multiple files. outputExpr: expression version of output. exclude: exclude some loci pre: True. pedigree format to be fed to makeped Note: the first child is always the proband. """ if type(pop) == type(''): pop = LoadPopulation(pop) if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" # open data file and pedigree file to write. try: dataFile = open(file + ".dat", "w") if pre: pedFile = open(file + ".pre", "w") else: pedFile = open(file + ".ped", "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file + ".dat/.ped to write." # look at excluded loci, are they in this chrom? markers = [pop.chromBegin(chrom)+m for m in range(pop.numLoci(chrom))] for e in exclude: markers.remove(e) # # file is opened. # write data file # nlocus # another one is affection status # risklocus (not sure. risk is not to be calculated) # sexlink autosomal: 0 # nprogram whatever # mutsys: all loci are mutational? 0 right now # mutmale # mutfemale # disequil: assume in LD? Yes. dataFile.write( '''%d 0 0 5 << nlocus, risklocus, sexlink, nprogram
|
for ped in range(0, np): pop.useAncestralPop(1) par1 = pop.individual(2*ped) if pre: pedFile.write("%3d 1 0 0 %d %d " \ % (ped+1, sexCode(par1), affectedCode(par1))) else: pedFile.write("%3d 1 0 0 3 0 0 %d 0 %d " \ % (ped+1, sexCode(par1), affectedCode(par1))) pedFile.write( genoStr(par1) + '\n' ) par2 = pop.individual(2*ped+1) if pre: pedFile.write("%3d 2 0 0 %d %d " \ % (ped+1, sexCode(par2), affectedCode(par2))) else: pedFile.write("%3d 2 0 0 3 0 0 %d 0 %d " \ % (ped+1, sexCode(par2), affectedCode(par2))) pedFile.write( genoStr(par2) + '\n' ) if par1.sex() == Male: dadID = 1 momID = 2 else: dadID = 2 momID = 1 pop.useAncestralPop(0) off1 = pop.individual(2*ped) if pre: pedFile.write("%3d 3 %d %d %d %d " \ % (ped+1, dadID, momID, sexCode(off1), affectedCode(off1))) else: pedFile.write("%3d 3 %d %d 0 4 4 %d 1 %d " \ % (ped+1, dadID, momID, sexCode(off1), affectedCode(off1))) pedFile.write( genoStr(off1) + '\n' ) off2 = pop.individual(2*ped+1) if pre: pedFile.write("%3d 4 %d %d %d %d " \ % (ped+1, dadID, momID, sexCode(off2), affectedCode(off2))) else: pedFile.write("%3d 4 %d %d 0 0 0 %d 0 %d " \ % (ped+1, dadID, momID, sexCode(off2), affectedCode(off2))) pedFile.write( genoStr(off2) + '\n' ) elif popType == 'bySubPop': np = pop.numSubPop() offset = 0 for ped in range(0, np): if pop.subPopSize(ped) == 0: continue if ped == 0: offset = 1 pop.useAncestralPop(1) if pop.subPopSize(ped) > 2: raise exceptions.ValueError("Pedigree " + str(ped) + " has more than two parents.") famID = 1 if pop.subPopSize(ped) >= 1: par1 = pop.individual(0, ped) if pre: pedFile.write("%3d %d 0 0 %d %d " \ % (ped+offset, famID, sexCode(par1), affectedCode(par1))) else: pedFile.write("%3d %d 0 0 3 0 0 %d 0 %d " \ % (ped+offset, famID, sexCode(par1), affectedCode(par1))) pedFile.write( genoStr(par1) + '\n' ) famID += 1 if pop.subPopSize(ped) == 2: par2 = pop.individual(1,ped) par2sex = sexCode(par2) if sexCode(par1) == par2sex: print "Warning: same sex parents at pedigree " + str(ped) if sexCode(par1) == Male: par2sex = Female else: par2sex = Male if pre: pedFile.write("%3d %d 0 0 %d %d " \ % (ped+offset, famID, par2sex, affectedCode(par2))) else: pedFile.write("%3d %d 0 0 3 0 0 %d 0 %d " \ % (ped+offset, famID, par2sex, affectedCode(par2))) pedFile.write( genoStr(par2) + '\n' ) famID += 1 if famID == 1: dadID = 0 momID = 0 elif famID == 2: if par1.sex() == Male: dadID = 1 monID = 0 else: dadID = 0 monID = 1 else: if par1.sex() == Male: dadID = 1 momID = 2 else: dadID = 2 momID = 1 pop.useAncestralPop(0) for o in range(0, pop.subPopSize(ped)): off = pop.individual(o,ped) if pre: pedFile.write("%3d %d %d %d %d %d " \ % (ped+offset, famID, dadID, momID, sexCode(off), affectedCode(off))) else: pedFile.write("%3d %d %d %d 0 4 4 %d 1 %d " \ % (ped+offset, famID, dadID, momID, sexCode(off), affectedCode(off))) pedFile.write( genoStr(off) + '\n' ) famID += 1 else: raise exceptions.ValueError("Only popType 'sibpair' and 'bySubPop' are supported.") pedFile.close() def saveLinkage(output='', outputExpr='', **kwargs): "An operator to save population in linkage format" parm = '' for (k,v) in kwargs.items(): parm += str(k) + '=' + str(v) + ', ' opt = '''pyEval(exposePop=1, %s stmts=r\'\'\'SaveLinkage(pop, rep=rep, output=r"""%s""", outputExpr=r"""%s""" )\'\'\')''' % ( parm, output, outputExpr) return eval(opt) def SaveCSV(pop, output='', outputExpr='', exclude=[], **kwargs): """ save file in CSV format This format is used mostly for randTent method. Ihe format is: chromsome famID,indID,sex,affectedness,allel1-1,allel1-2,allele2-1,allele2-2, ... ... chromosome """ if output != '': file = output elif outputExpr != '': file = eval(outputExpr, globals(), pop.vars() ) else: raise exceptions.ValueError, "Please specify output or outputExpr" markers = {} for ch in range(0,pop.numChrom()): markers[ch] = [] for m in range(0, pop.numLoci(ch)): if not pop.chromBegin(ch) + m in exclude: markers[ch].append(m) try: out = open( file, "w") except exceptions.IOError: raise exceptions.IOError, "Can not open file " + file +" to write." content = [''] * pop.numChrom() for i in range(0, pop.numChrom()): content[i] += 'Chromosome ' + str(i+1) + ',,,' for m in markers[i]: content[i] += ",locus%d_%d,%d" % (i+1, m+1, m+1) content[i] += "\n" def sexCode(ind): if ind.sex() == Male: return 1 else: return 2 def affectedCode(ind): if ind.affected(): return 1 else: return 2 def genoStr(ind, ch): string = '' for marker in markers[ch]: string += ",%d,%d" % (ind.allele(marker, 0, ch), ind.allele(marker, 1, ch)) return string np = pop.popSize()/2 for ch in range(0, pop.numChrom()): for ped in range(0, np): pop.useAncestralPop(1) par1 = pop.individual(2*ped) content[ch] += "%3d,1,%d,%d" % (ped+1, sexCode(par1), affectedCode(par1)) content[ch] += genoStr(par1, ch) + '\n' par2 = pop.individual(2*ped+1) content[ch] += "%3d,2,%d,%d" % (ped+1, sexCode(par2), affectedCode(par2)) content[ch] += genoStr(par2, ch) + '\n' pop.useAncestralPop(0) off1 = pop.individual(2*ped) content[ch] += "%3d,3,%d,%d" % (ped+1, sexCode(off1), affectedCode(off1)) content[ch] += genoStr(off1, ch) + '\n' off2 = pop.individual(2*ped+1) content[ch] += "%3d,4,%d,%d" % (ped+1, sexCode(off2), affectedCode(off2)) content[ch] += genoStr(off2, ch) + '\n' for i in range(0, pop.numChrom()): out.write(content[i]) out.close()
|
for ch in range(0, pop.numChrom()): for ped in range(0, np): pop.useAncestralPop(1) par1 = pop.individual(2*ped) content[ch] += "%3d,1,%d,%d" % (ped+1, sexCode(par1), affectedCode(par1)) content[ch] += genoStr(par1, ch) + '\n' par2 = pop.individual(2*ped+1) content[ch] += "%3d,2,%d,%d" % (ped+1, sexCode(par2), affectedCode(par2)) content[ch] += genoStr(par2, ch) + '\n' pop.useAncestralPop(0) off1 = pop.individual(2*ped) content[ch] += "%3d,3,%d,%d" % (ped+1, sexCode(off1), affectedCode(off1)) content[ch] += genoStr(off1, ch) + '\n' off2 = pop.individual(2*ped+1) content[ch] += "%3d,4,%d,%d" % (ped+1, sexCode(off2), affectedCode(off2)) content[ch] += genoStr(off2, ch) + '\n' for i in range(0, pop.numChrom()): out.write(content[i]) out.close()
|
def genoStr(ind): string = '' for marker in markers: string += "%d %d " % (ind.allele(marker, 0)+1, ind.allele(marker, 1)+1) return string
|
try: f = open(file) allLines = f.readlines() except: raise exceptions.ValueError("Can not open one of file " + file + ".\n" + \ "Or file format is not correct.") def sexCode(code): if code == 1: return Male
|
try: f = open(file) allLines = f.readlines() except: raise exceptions.ValueError("Can not open one of file " + file + ".\n" + \ "Or file format is not correct.") def sexCode(code): if code == 1: return Male else: return Female def affectedCode(code): if code == 1: return True else: return False numLoci = [] lociOrder = [] lociPos = [] lociNames = [] for line in allLines: if line[:10] == 'Chromosome': numLoci.append(0) lociOrder.append([]) lociPos.append([]) lociNames.append([]) i = len(numLoci) - 1 chInfo = line.split(',') numLoci[i] = (len(chInfo)-4)/2 names = [] for j in range(0, numLoci[i]): lociPos[i].append(float( chInfo[5+j*2])) lociOrder[i].append(lociPos[i][-1]) names.append( chInfo[4+j*2].strip()) lociOrder[i].sort() for j in range(0,len(lociOrder[i])): lociOrder[i][j] = lociPos[i].index(lociOrder[i][j]) lociPos[i].sort() for j in range(0,len(lociOrder[i])): lociNames[i].append( names[ lociOrder[i][j] ]) i = 0 parSizes = [0] offSizes = [0] curFam = 0 for line in allLines: if line[:10] == 'Chromosome': if i==0: i = 1 continue else: break fam,mem = map(int,line.split(',')[0:2]) if fam!= curFam: for j in range(curFam, fam): parSizes.append(0) offSizes.append(0) curFam = fam if mem == 1 or mem == 2: parSizes[fam] += 1 else: offSizes[fam] += 1 offPop = population( subPop=offSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) parPop = population( subPop=parSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) maxAllele = 0 curPar = 0 curOff = 0 curFam = 0 i = -1 for line in allLines: if line[:10] == 'Chromosome': i += 1 continue info = map(int, line.strip().split(',')) if curFam != info[0]: curPar = 0 curOff = 0 curFam = info[0] if info[1] == 1 or info[1] == 2: ind = parPop.individual(curPar, info[0]) curPar += 1 else: ind = offPop.individual(curOff, info[0]) curOff += 1 geno = ind.arrGenotype(0,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[4+2* lociOrder[i][loc] ] geno = ind.arrGenotype(1,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[5+2* lociOrder[i][loc] ] ind.setSex( sexCode( info[2] )) ind.setAffected( affectedCode( info[3])) if max( info[4:] ) > maxAllele: maxAllele = max( info[4:]) pop = parPop pop.setAncestralDepth(1) pop.pushAndDiscard(offPop) pop.setMaxAllele(maxAllele) return pop def trajFunc(endingGen, traj): ''' return freq at each generation from a simulated trajctories. ''' def func(gen): freq = [] for tr in traj: if gen < endingGen - len(tr) + 1: freq.append( 0 ) else: freq.append( tr[ gen - (endingGen - len(tr) + 1) ] ) return freq return func def FreqTrajectoryMultiStochWithSubPop( curGen, numLoci, freq, NtFunc, fitness, minMutAge, maxMutAge, mode = 'uneven', ploidy=2, restartIfFail=True): ''' Simulate frequency trajectory with subpopulation structure, migration is currently ignored. The essential part of this script is to simulate the trajectory of each subpopulation independently by calling FreqTrajectoryMultiStoch with properly wrapped NtFunc function. If mode = 'even' (default) When freq is the same length of the number of loci. The allele frequency at the last generation will be multi-nomially distributed. If freq for each subpop is specified in the order of loc1-sp1, loc1-sp2, .. loc2-sp1, .... This freq will be used directly. If mode = 'uneven'. The number of disease alleles will be proportional to the interval lengths of 0 x x x 1 while x are uniform [0,1]. The distribution of interval lengths, are roughly exponential (conditional on overall length 1). ' If mode = 'none', subpop will be ignored. This script assume a single-split model of NtFunc ''' numSP = len(NtFunc(curGen)) if numSP == 1 or mode == 'none': traj = FreqTrajectoryMultiStoch( curGen=curGen, freq=freq, NtFunc=NtFunc, fitness=fitness, minMutAge=minMutAge, maxMutAge=maxMutAge, ploidy=ploidy, restartIfFail=restartIfFail) if len(traj) == 0: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) return (traj, [curGen-len(x)+1 for x in traj], trajFunc(curGen, traj)) split = curGen; while(True): if len(NtFunc(split)) == 1: break split -= 1 split += 1 if minMutAge < curGen - split: minMutAge = split if maxMutAge == 0: maxMutAge = endGen if minMutAge > maxMutAge: print "Minimal mutant age %d is larger then maximum age %d" % (minMutAge, maxMutAge) sys.exit(1) if len(freq) == numSP*numLoci: freqAll = freq elif len(freq) == numLoci: freqAll = [0]*(numLoci*numSP) if mode == 'even': for i in range(numLoci): wt = NtFunc(curGen) ps = sum(wt) totNum = int(freq[i]*ps) num = rng().randMultinomialVal(totNum, [x/float(ps) for x in wt]) for sp in range(numSP): freqAll[sp+i*numSP] = num[sp]/float(wt[sp]) elif mode == 'uneven': for i in range(numLoci): wt = NtFunc(curGen) totNum = int(freq[i]*sum(wt)) while(True): num = [0,1]+[rng().randUniform01() for x in range(numSP-1)] num.sort() for sp in range(numSP): freqAll[sp+i*numSP] = (num[sp+1]-num[sp])*totNum/wt[sp] if max(freqAll) < 1: break; else: print "Wrong mode parameter is used: ", mode print "Using ", mode, "distribution of alleles at the last generation" print "Frequencies at the last generation: sp0-loc0, loc1, ..., sp1-loc0,..." for sp in range(numSP): print "SP ", sp, ': ', for i in range(numLoci): print "%.3f " % freqAll[sp+i*numSP], print
|
def LoadCSV(file): """ load file from randfam CSV format file: input file For format description, please see SaveCSV """ # determine files to read try: f = open(file) allLines = f.readlines() except: raise exceptions.ValueError("Can not open one of file " + file + ".\n" + \ "Or file format is not correct.") # sex code of ranfam format def sexCode(code): if code == 1: return Male else: return Female def affectedCode(code): if code == 1: return True else: return False # determine loci number on each chromsome numLoci = [] # lociPos[ch][ lociOrder[j] ] will be in order lociOrder = [] lociPos = [] lociNames = [] # process the first time for line in allLines: if line[:10] == 'Chromosome': numLoci.append(0) lociOrder.append([]) lociPos.append([]) lociNames.append([]) i = len(numLoci) - 1 chInfo = line.split(',') numLoci[i] = (len(chInfo)-4)/2 names = [] # store unordered loci name for j in range(0, numLoci[i]): lociPos[i].append(float( chInfo[5+j*2])) lociOrder[i].append(lociPos[i][-1]) names.append( chInfo[4+j*2].strip()) # deal with loci order lociOrder[i].sort() for j in range(0,len(lociOrder[i])): lociOrder[i][j] = lociPos[i].index(lociOrder[i][j]) # adjust loci dist lociPos[i].sort() # really add lociNames for j in range(0,len(lociOrder[i])): lociNames[i].append( names[ lociOrder[i][j] ]) # process the second time # determine family structure i = 0 parSizes = [0] offSizes = [0] curFam = 0 for line in allLines: if line[:10] == 'Chromosome': if i==0: i = 1 continue else: # only process the first block break fam,mem = map(int,line.split(',')[0:2]) if fam!= curFam: # fam = 1 at first for j in range(curFam, fam): parSizes.append(0) offSizes.append(0) curFam = fam if mem == 1 or mem == 2: parSizes[fam] += 1 else: offSizes[fam] += 1 # create a population offPop = population( subPop=offSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) parPop = population( subPop=parSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) # process the third time # fill in info maxAllele = 0 curPar = 0 curOff = 0 curFam = 0 i = -1 # i is the chromosome number for line in allLines: if line[:10] == 'Chromosome': i += 1 continue info = map(int, line.strip().split(',')) if curFam != info[0]: curPar = 0 curOff = 0 curFam = info[0] # info[0] is family ID, as well as subpop id if info[1] == 1 or info[1] == 2: # parents ind = parPop.individual(curPar, info[0]) curPar += 1 else: ind = offPop.individual(curOff, info[0]) curOff += 1 # get genotype of chromosome 1, ploidy 0 geno = ind.arrGenotype(0,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[4+2* lociOrder[i][loc] ] # ploidy 1 geno = ind.arrGenotype(1,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[5+2* lociOrder[i][loc] ] ind.setSex( sexCode( info[2] )) ind.setAffected( affectedCode( info[3])) if max( info[4:] ) > maxAllele: maxAllele = max( info[4:]) # now we have all info, combine the pop pop = parPop pop.setAncestralDepth(1) pop.pushAndDiscard(offPop) pop.setMaxAllele(maxAllele) return pop
|
return Female def affectedCode(code): if code == 1: return True else: return False numLoci = [] lociOrder = [] lociPos = [] lociNames = [] for line in allLines: if line[:10] == 'Chromosome': numLoci.append(0) lociOrder.append([]) lociPos.append([]) lociNames.append([]) i = len(numLoci) - 1 chInfo = line.split(',') numLoci[i] = (len(chInfo)-4)/2 names = [] for j in range(0, numLoci[i]): lociPos[i].append(float( chInfo[5+j*2])) lociOrder[i].append(lociPos[i][-1]) names.append( chInfo[4+j*2].strip()) lociOrder[i].sort() for j in range(0,len(lociOrder[i])): lociOrder[i][j] = lociPos[i].index(lociOrder[i][j]) lociPos[i].sort() for j in range(0,len(lociOrder[i])): lociNames[i].append( names[ lociOrder[i][j] ]) i = 0 parSizes = [0] offSizes = [0] curFam = 0 for line in allLines: if line[:10] == 'Chromosome': if i==0: i = 1 continue else: break fam,mem = map(int,line.split(',')[0:2]) if fam!= curFam: for j in range(curFam, fam): parSizes.append(0) offSizes.append(0) curFam = fam if mem == 1 or mem == 2: parSizes[fam] += 1 else: offSizes[fam] += 1 offPop = population( subPop=offSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) parPop = population( subPop=parSizes, loci = numLoci, ploidy=2, lociNames=lociNames, lociPos=lociPos) maxAllele = 0 curPar = 0 curOff = 0 curFam = 0 i = -1 for line in allLines: if line[:10] == 'Chromosome': i += 1 continue info = map(int, line.strip().split(',')) if curFam != info[0]: curPar = 0 curOff = 0 curFam = info[0] if info[1] == 1 or info[1] == 2: ind = parPop.individual(curPar, info[0]) curPar += 1 else: ind = offPop.individual(curOff, info[0]) curOff += 1 geno = ind.arrGenotype(0,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[4+2* lociOrder[i][loc] ] geno = ind.arrGenotype(1,i) for loc in range(0,offPop.numLoci(i)): geno[loc] = info[5+2* lociOrder[i][loc] ] ind.setSex( sexCode( info[2] )) ind.setAffected( affectedCode( info[3])) if max( info[4:] ) > maxAllele: maxAllele = max( info[4:]) pop = parPop pop.setAncestralDepth(1) pop.pushAndDiscard(offPop) pop.setMaxAllele(maxAllele) return pop def trajFunc(endingGen, traj): ''' return freq at each generation from a simulated trajctories. ''' def func(gen): freq = [] for tr in traj: if gen < endingGen - len(tr) + 1: freq.append( 0 ) else: freq.append( tr[ gen - (endingGen - len(tr) + 1) ] ) return freq return func def FreqTrajectoryMultiStochWithSubPop( curGen, numLoci, freq, NtFunc, fitness, minMutAge, maxMutAge, mode = 'uneven', restartIfFail=True): ''' Simulate frequency trajectory with subpopulation structure, migration is currently ignored. The essential part of this script is to simulate the trajectory of each subpopulation independently by calling FreqTrajectoryMultiStoch with properly wrapped NtFunc function. If mode = 'even' (default) When freq is the same length of the number of loci. The allele frequency at the last generation will be multi-nomially distributed. If freq for each subpop is specified in the order of loc1-sp1, loc1-sp2, .. loc2-sp1, .... This freq will be used directly. If mode = 'uneven'. The number of disease alleles will be proportional to the interval lengths of 0 x x x 1 while x are uniform [0,1]. The distribution of interval lengths, are roughly exponential (conditional on overall length 1). ' If mode = 'none', subpop will be ignored. This script assume a single-split model of NtFunc ''' numSP = len(NtFunc(curGen)) if numSP == 1 or mode == 'none': traj = FreqTrajectoryMultiStoch( curGen=curGen, freq=freq,
|
raise exceptions.ValueError("Wrong freq length") spTraj = [0]*numSP*numLoci for sp in range(numSP): print "Generting trajectory for subpopulation %d (generation %d - %d)" % (sp, split, curGen) def spPopSize(gen): if gen < split: return [NtFunc(split-1)[0]] else: return [NtFunc(gen)[sp]] while True: t = FreqTrajectoryMultiStoch( curGen=curGen, freq=[freqAll[sp+x*numSP] for x in range(numLoci)], NtFunc=spPopSize, fitness=fitness, minMutAge=curGen-split, maxMutAge=curGen-split, ploidy=ploidy, restartIfFail=False) if 0 in [len(x) for x in t]: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) if 0 in [x[0] for x in t]: print "Subpop return 0 index. restart " else: break; for i in range(numLoci): spTraj[sp+i*numSP] = t[i] traj = [] for i in range(numLoci): traj.append([]) for g in range(split, curGen+1): totAllele = sum( [ spTraj[sp+i*numSP][g-split] * NtFunc(g)[sp] for sp in range(numSP) ]) traj[i].append( totAllele / sum(NtFunc(g)) ) print "Starting allele frequency (at split) ", [traj[i][0] for i in range(numLoci)] print "Generating combined trajsctory with range: ", minMutAge, " - ", maxMutAge trajBeforeSplit = FreqTrajectoryMultiStoch( curGen=split, freq=[traj[i][0] for i in range(numLoci)],
|
def sexCode(code): if code == 1: return Male else: return Female
|
fitness=fitness, minMutAge=minMutAge, maxMutAge=maxMutAge, restartIfFail=restartIfFail) if len(traj) == 0: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) return (traj, [curGen-len(x)+1 for x in traj], trajFunc(curGen, traj)) split = curGen; while(True): if len(NtFunc(split)) == 1: break split -= 1 split += 1 if minMutAge < curGen - split: minMutAge = split if maxMutAge == 0: maxMutAge = endGen if minMutAge > maxMutAge: print "Minimal mutant age %d is larger then maximum age %d" % (minMutAge, maxMutAge) sys.exit(1) if len(freq) == numSP*numLoci: freqAll = freq elif len(freq) == numLoci: freqAll = [0]*(numLoci*numSP) if mode == 'even': for i in range(numLoci): wt = NtFunc(curGen) ps = sum(wt) totNum = int(freq[i]*ps) num = rng().randMultinomialVal(totNum, [x/float(ps) for x in wt]) for sp in range(numSP): freqAll[sp+i*numSP] = num[sp]/float(wt[sp]) elif mode == 'uneven': for i in range(numLoci): wt = NtFunc(curGen) totNum = int(freq[i]*sum(wt)) while(True): num = [0,1]+[rng().randUniform01() for x in range(numSP-1)] num.sort() for sp in range(numSP): freqAll[sp+i*numSP] = (num[sp+1]-num[sp])*totNum/wt[sp] if max(freqAll) < 1: break; else: print "Wrong mode parameter is used: ", mode print "Using ", mode, "distribution of alleles at the last generation" print "Frequencies at the last generation: sp0-loc0, loc1, ..., sp1-loc0,..." for sp in range(numSP): print "SP ", sp, ': ', for i in range(numLoci): print "%.3f " % freqAll[sp+i*numSP], print else: raise exceptions.ValueError("Wrong freq length") spTraj = [0]*numSP*numLoci for sp in range(numSP): print "Generting trajectory for subpopulation %d (generation %d - %d)" % (sp, split, curGen) def spPopSize(gen): if gen < split: return [NtFunc(split-1)[0]] else: return [NtFunc(gen)[sp]] while True: t = FreqTrajectoryMultiStoch( curGen=curGen, freq=[freqAll[sp+x*numSP] for x in range(numLoci)], NtFunc=spPopSize,
|
def FreqTrajectoryMultiStochWithSubPop( curGen, numLoci, freq, NtFunc, fitness, minMutAge, maxMutAge, mode = 'uneven', restartIfFail=True): ''' Simulate frequency trajectory with subpopulation structure, migration is currently ignored. The essential part of this script is to simulate the trajectory of each subpopulation independently by calling FreqTrajectoryMultiStoch with properly wrapped NtFunc function. If mode = 'even' (default) When freq is the same length of the number of loci. The allele frequency at the last generation will be multi-nomially distributed. If freq for each subpop is specified in the order of loc1-sp1, loc1-sp2, .. loc2-sp1, .... This freq will be used directly. If mode = 'uneven'. The number of disease alleles will be proportional to the interval lengths of 0 x x x 1 while x are uniform [0,1]. The distribution of interval lengths, are roughly exponential (conditional on overall length 1). ' If mode = 'none', subpop will be ignored. This script assume a single-split model of NtFunc ''' numSP = len(NtFunc(curGen)) if numSP == 1 or mode == 'none': traj = FreqTrajectoryMultiStoch( curGen=curGen, freq=freq, NtFunc=NtFunc, fitness=fitness, minMutAge=minMutAge, maxMutAge=maxMutAge, restartIfFail=restartIfFail) if len(traj) == 0: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) return (traj, [curGen-len(x)+1 for x in traj], trajFunc(curGen, traj)) # other wise, do it in two stages # get the split generation. split = curGen; while(True): if len(NtFunc(split)) == 1: break split -= 1 split += 1 # set default for min/max mutage if minMutAge < curGen - split: minMutAge = split if maxMutAge == 0: maxMutAge = endGen if minMutAge > maxMutAge: print "Minimal mutant age %d is larger then maximum age %d" % (minMutAge, maxMutAge) sys.exit(1) # now, NtFunc(split) has subpopulations # # for each subpopulation if len(freq) == numSP*numLoci: freqAll = freq elif len(freq) == numLoci: freqAll = [0]*(numLoci*numSP) if mode == 'even': for i in range(numLoci): wt = NtFunc(curGen) ps = sum(wt) # total allele number totNum = int(freq[i]*ps) # in subpopulations, according to population size num = rng().randMultinomialVal(totNum, [x/float(ps) for x in wt]) for sp in range(numSP): freqAll[sp+i*numSP] = num[sp]/float(wt[sp]) elif mode == 'uneven': for i in range(numLoci): wt = NtFunc(curGen) # total allele number totNum = int(freq[i]*sum(wt)) while(True): # get [ x x x x x ] while x is uniform [0,1] num = [0,1]+[rng().randUniform01() for x in range(numSP-1)] num.sort() for sp in range(numSP): freqAll[sp+i*numSP] = (num[sp+1]-num[sp])*totNum/wt[sp] if max(freqAll) < 1: break; else: print "Wrong mode parameter is used: ", mode print "Using ", mode, "distribution of alleles at the last generation" print "Frequencies at the last generation: sp0-loc0, loc1, ..., sp1-loc0,..." for sp in range(numSP): print "SP ", sp, ': ', for i in range(numLoci): print "%.3f " % freqAll[sp+i*numSP], print else: raise exceptions.ValueError("Wrong freq length") spTraj = [0]*numSP*numLoci for sp in range(numSP): print "Generting trajectory for subpopulation %d (generation %d - %d)" % (sp, split, curGen) # FreqTraj... will probe Nt for the next geneartion. def spPopSize(gen): if gen < split: return [NtFunc(split-1)[0]] else: return [NtFunc(gen)[sp]] while True: t = FreqTrajectoryMultiStoch( curGen=curGen, freq=[freqAll[sp+x*numSP] for x in range(numLoci)], NtFunc=spPopSize, fitness=fitness, minMutAge=curGen-split, maxMutAge=curGen-split, restartIfFail=False) # failed to generate one of the trajectory if 0 in [len(x) for x in t]: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) if 0 in [x[0] for x in t]: print "Subpop return 0 index. restart " else: break; # now spTraj has SP0: loc0,1,2..., SP1 loc 0,1,2,..., ... for i in range(numLoci): spTraj[sp+i*numSP] = t[i] # add all trajectories traj = [] for i in range(numLoci): traj.append([]) for g in range(split, curGen+1): totAllele = sum( [ spTraj[sp+i*numSP][g-split] * NtFunc(g)[sp] for sp in range(numSP) ]) traj[i].append( totAllele / sum(NtFunc(g)) ) # print "Starting allele frequency (at split) ", [traj[i][0] for i in range(numLoci)] print "Generating combined trajsctory with range: ", minMutAge, " - ", maxMutAge trajBeforeSplit = FreqTrajectoryMultiStoch( curGen=split, freq=[traj[i][0] for i in range(numLoci)], NtFunc=NtFunc, fitness=fitness, minMutAge=minMutAge-len(traj[0])+1, maxMutAge=maxMutAge-len(traj[0])+1, restartIfFail=True) if 1 in [len(x) for x in trajBeforeSplit]: print "Failed to generated trajectory. (Tried more than 1000 times)" sys.exit(0) def trajFuncWithSubPop(gen): if gen >= split: return [spTraj[x][gen-split] for x in range(numLoci*numSP)] else: freq = [] for tr in trajBeforeSplit: if gen < split - len(tr) + 1: freq.append( 0 ) else: freq.append( tr[ gen - (split - len(tr) + 1) ] ) return freq trajAll = [] for i in range(numLoci): trajAll.append( [] ) trajAll[i].extend(trajBeforeSplit[i]) trajAll[i].extend(traj[i][1:]) # how exactly should I return a trajectory? return (trajAll, [curGen-len(x)+1 for x in trajAll ], trajFuncWithSubPop)
|
|
minMutAge=curGen-split, maxMutAge=curGen-split, restartIfFail=False) if 0 in [len(x) for x in t]: print "Failed to generate trajectory. You may need to set a different set of parameters." sys.exit(1) if 0 in [x[0] for x in t]: print "Subpop return 0 index. restart " else: break;
|
minMutAge=minMutAge-len(traj[0])+1, maxMutAge=maxMutAge-len(traj[0])+1, ploidy=ploidy, restartIfFail=True) if 1 in [len(x) for x in trajBeforeSplit]: print "Failed to generated trajectory. (Tried more than 1000 times)" sys.exit(0) def trajFuncWithSubPop(gen): if gen >= split: return [spTraj[x][gen-split] for x in range(numLoci*numSP)] else: freq = [] for tr in trajBeforeSplit: if gen < split - len(tr) + 1: freq.append( 0 ) else: freq.append( tr[ gen - (split - len(tr) + 1) ] ) return freq trajAll = []
|
def spPopSize(gen): if gen < split: return [NtFunc(split-1)[0]] else: return [NtFunc(gen)[sp]]
|
spTraj[sp+i*numSP] = t[i] traj = [] for i in range(numLoci): traj.append([]) for g in range(split, curGen+1): totAllele = sum( [ spTraj[sp+i*numSP][g-split] * NtFunc(g)[sp] for sp in range(numSP) ]) traj[i].append( totAllele / sum(NtFunc(g)) ) print "Starting allele frequency (at split) ", [traj[i][0] for i in range(numLoci)] print "Generating combined trajsctory with range: ", minMutAge, " - ", maxMutAge trajBeforeSplit = FreqTrajectoryMultiStoch( curGen=split, freq=[traj[i][0] for i in range(numLoci)], NtFunc=NtFunc, fitness=fitness, minMutAge=minMutAge-len(traj[0])+1, maxMutAge=maxMutAge-len(traj[0])+1, restartIfFail=True) if 1 in [len(x) for x in trajBeforeSplit]: print "Failed to generated trajectory. (Tried more than 1000 times)" sys.exit(0) def trajFuncWithSubPop(gen): if gen >= split: return [spTraj[x][gen-split] for x in range(numLoci*numSP)] else: freq = [] for tr in trajBeforeSplit: if gen < split - len(tr) + 1: freq.append( 0 ) else: freq.append( tr[ gen - (split - len(tr) + 1) ] ) return freq trajAll = [] for i in range(numLoci): trajAll.append( [] ) trajAll[i].extend(trajBeforeSplit[i]) trajAll[i].extend(traj[i][1:]) return (trajAll, [curGen-len(x)+1 for x in trajAll ], trajFuncWithSubPop)
|
trajAll.append( [] ) trajAll[i].extend(trajBeforeSplit[i]) trajAll[i].extend(traj[i][1:]) return (trajAll, [curGen-len(x)+1 for x in trajAll ], trajFuncWithSubPop)
|
def spPopSize(gen): if gen < split: return [NtFunc(split-1)[0]] else: return [NtFunc(gen)[sp]]
|
def mate(par, off): ''' a function that get aprental, offspring generations it will select all male and add 1 to their age, and spread to the generation ''' idx = par.infoIdx('age') for i in range(par.popSize()): ind = par.individual(i) ind.setInfo(ind.info(idx)+1, idx) return True pop = population(20, loci=[1], infoFields=['age', 'stage']) simu = simulator(pop, pyMating(mate)) simu.step([initByValue([1])]) self.assertEqual(simu.population(0).indInfo('age'), tuple([1.0]*20))
|
def testPyMating(self): ' test pyMating '
|
|
os.remove(logFile)
|
try: os.remove(logFile) except: pass
|
def last_two(gen): if gen >= endGen -2: return 2 else: return 1
|
for val in values[g]: entryWidgets[g].select_set( opt['chooseFrom'].index(val))
|
if type(values[g]) in [types.TupleType, types.ListType]: for val in values[g]: entryWidgets[g].select_set( opt['chooseFrom'].index(val)) else: entryWidgets[g].select_set( opt['chooseFrom'].index( values[g] ))
|
def doOK(event): " OK buton is pressed " root1.quit()
|
if type(values[g]) in [types.ListType, types.TupleType] and len(values[g])>0:
|
if type(values[g]) in [types.ListType, types.TupleType]:
|
def formatDesc(text): # linux can auto wrap, windows can not but sometime wrap # at unexpected places... It is safer to wrap at original # place. return '\n'.join( [x.strip() for x in text.splitlines()] )
|
db = multi_database(options.databases)[0][0]
|
db = zope.app.appsetup.appsetup.multi_database(options.databases)[0][0]
|
def debug(args=None): options = load_options(args) zope.app.appsetup.config(options.site_definition) db = multi_database(options.databases)[0][0] notify(zope.app.appsetup.interfaces.DatabaseOpened(db)) return db
|
"ptyperegisterinit", map
|
"ptyperegisterinit",
|
def mergeArrays(a1, a2): a3 = [] for item in a1: a3.append(item) for item in a2: a3.append(item) return a3
|
sys.path = ".."
|
sys.path = os.getcwd()
|
def mergeArrays(a1, a2): a3 = [] for item in a1: a3.append(item) for item in a2: a3.append(item) return a3
|
e = "../overrides/" + xsubdir
|
e = gtkpascalgen_outdir + "/overrides/" + xsubdir
|
def psimplevardecl(vartype, varname): global needtypes global c2penumcopied global wrapperprefix #extra = "" a = vartype.split(" ") extra = " ".join(a[:-1]) if a[-1].endswith("*"): a[-1] = "P" + a[-1][:-1] if a[-1].endswith("*"): # actually this is an c array most of the time so this here is "wrong" a[-1] = "P" + a[-1][:-1] if a[-1].startswith("P"): a[-1] = "PW" + a[-1][1:] vartype = a[-1] else: if vartype.endswith("Class"): if vartype not in needtypes: needtypes.append(vartype) vartype = wrapperprefix + vartype elif vartype == "GtkCallback": vartype = wrapperprefix + vartype elif vartype in c2penumcopied: vartype = wrapperprefix + vartype elif vartype == "int": vartype = "gint{actually int}" varname = varname.lower().replace("_", "") if vartype == "GType": vartype = "TGType" return "%s %s: %s" % (extra, varname, vartype)
|
nf = os.path.join("..", "output", os.path.basename(xsubdir), pfilename)
|
nf = os.path.join(gtkpascalgen_outdir, "output", os.path.basename(xsubdir), pfilename)
|
def psimplevardecl(vartype, varname): global needtypes global c2penumcopied global wrapperprefix #extra = "" a = vartype.split(" ") extra = " ".join(a[:-1]) if a[-1].endswith("*"): a[-1] = "P" + a[-1][:-1] if a[-1].endswith("*"): # actually this is an c array most of the time so this here is "wrong" a[-1] = "P" + a[-1][:-1] if a[-1].startswith("P"): a[-1] = "PW" + a[-1][1:] vartype = a[-1] else: if vartype.endswith("Class"): if vartype not in needtypes: needtypes.append(vartype) vartype = wrapperprefix + vartype elif vartype == "GtkCallback": vartype = wrapperprefix + vartype elif vartype in c2penumcopied: vartype = wrapperprefix + vartype elif vartype == "int": vartype = "gint{actually int}" varname = varname.lower().replace("_", "") if vartype == "GType": vartype = "TGType" return "%s %s: %s" % (extra, varname, vartype)
|
print classname, "warning", fn["name"], "parameter is overridden const but was not originally var"
|
print classname, "warning", fn["name"], "parameter is overridden 'const' but was not originally 'var'"
|
def func_cb(varname, fn): global classname global cclassconstructparams global c2pfuncparamoverride global pextratypes global c2pcallbackpointers global preturntransformers global poverridearraytypes global wrapperpointerprefix global wrapperprefix global pstringtype global interfaceprefix if fn["name"] in c2pfuncparamoverride: overridep = c2pfuncparamoverride[fn["name"]] else: overridep = None if varname == "poverrideornot": if fn["name"].endswith("_sink"): return "override;" elif fn["name"] == "gtk_object_destroy": return "virtual;" return "" if varname == "pclassifier": if isMemberFN(fn) or isInterfaceImplementationFN(fn): return "" else: if isInInterfaceMode(): return "//class" else: return "class" if varname == "preturntransformer": ret = func_cb("preturn", fn) if ret in preturntransformers: return preturntransformers[ret] if ret == "Single": return "" # workaround, fixme Lowlevel return ret if varname == "preturn": if fn["return"] == "gchar**": preturn = arrayTypeOf(pstringtype) else: preturn = c2ptype(fn["return"]) if preturn != None: if preturn.startswith("var "): preturn = preturn[4:] if isFNListReturnOverride(fn): preturn = getFNListReturnOverride(fn)["preturn"] # support ICloneable interface by modding the return type to ICloneable # function Clone: ICloneable; ## TODO parent supports ICloneable ? if fn["name"] == ("%s_copy" % uscclassname): assert(len(fn["args"]) == 1) rover = getFNListReturnOverride(fn) if rover != None and "kind" in rover and rover["kind"] == "interface": preturn = "ICloneable" else: print classname, "warning", fn["name"], "missing interface return override" return preturn if varname == "preturnerrorcode": if isFNListReturnOverride(fn): return getFNListReturnOverride(fn)["errorcode"] else: print classname, "warning", "no known error code for", fn["name"], "; Hint: use 'errorcode' return override" return "?" if varname == "preturnitemrawtype": if not isFNListReturnOverride(fn): print classname, "error", "no list return overide for", fn["name"], "(preturnitemrawtype)" arritemt = getFNListReturnOverride(fn)["arritemt"] if arritemt.startswith(interfaceprefix): nativetype = wrapperpointerprefix + arritemt[len(interfaceprefix):] elif arritemt == "TGtkAccelGroupEntry": # var ? nativetype = wrapperpointerprefix + arritemt[1:] # cough. #print "nativetype", nativetype elif arritemt == pstringtype: #"UTF8String": # or so try: nativetype = getFNListReturnOverride(fn)["arritemraw"] except: nativetype = "PChar" print classname, "warning, no nativetype for return value of function ", fn["name"], "assuming PChar" #"PChar" if nativetype == "?": print classname, "error", fn["name"], "have", nativetype, "for", arritemt else: print classname, "error, cannot find nativetype for ", arritemt, "for function", fn["name"] #print "native", nativetype return nativetype if varname == "wrapperexpectedtype": arritemt = getFNListReturnOverride(fn)["arritemt"] if arritemt.startswith(interfaceprefix): return wrapperpointerprefix + "GObject" # FIXME others if classname == "GtkAccelGroup" and arritemt == "TGtkAccelGroupEntry": #print arritemt #== "GtkAccelGroupEntry*" and #print "YES" return "gtkAccelGroupEntryFromPointer" #if classname == "GtkAccelGroup": # FIXME do that somewhere else (*fromcarray) # if arritemt.startswith(classprefix): # return wrapperpointerprefix + arritemt[len(classprefix):] return "" if varname == "preturnitemtype": if (c2pfnname(fn["name"]) == "Clone"): arritemt = interfaceprefix + "Cloneable" else: arritemt = getFNListReturnOverride(fn)["arritemt"] if isInterface(arritemt): needInterface(arritemt) needImplementationUnitForClass(implementationClassForInterface(arritemt)) return arritemt if varname == "preturnitemtypeas": res = func_cb("preturnitemtype", fn) if isInterface(res) or isImplementationClass(res): return " as %s" % res else: return "" if varname == "preturnlistfinalizer": return getFNListReturnOverride(fn)["pendfree"] if varname == "preturnperitempreaction": try: return getFNListReturnOverride(fn)["pforeach"] except: print classname, "error", fn["name"], "pforeach not found in config" raise return pforeach if varname == "pfreeforeachitem": return getFNListReturnOverride(fn)["pforeachfree"] if varname == "preturnitemclassname": return getFNListReturnOverride(fn)["pclassconstruct"] if varname == "pfunction": return c2pfnname(fn["name"]) if varname == "cfunction": return fn["name"] if varname == "pfunctionparams": a = [] args = fn["args"] invalidlist = getInvalidArgs(fn, args) ixo = 0 if isMemberFN(fn) or isInterfaceImplementationFN(fn): #if len(args) > 0 and args[0][0].lower() == classname.lower() + "*": args = args[1:] # skip instance param ixo = -1 if len(args) > 0 and args[-1][0] == "GError**": args = args[:-1] #print args[0][0] ix = 1-ixo for argtyp, argname in args: canvar = False fnover = getFNListParamOverride(fn, ix) adefault = getFNListParamDefault(fn, ix) ptypeover = None if fnover != None and len(fnover) > 0: c = fnover[0] try: t = fnover[1] it = c2ptype(t) #if isEnum(it): if it.startswith("T"): # FIXME! after conv it = it[1:] except: t = None it = None if c == "type": ptypeover = t if c == "varany": ptypeover = "var " if c == "forceinstring": # for "gchar*" instead of "const gchar*" bug (gtk_paint_shadow_gap) ptypeover = pstringtype if c == "pointer": #print "pointer", fn ptypeover = "Pointer" if c == "ccallback": # TODO pass if c == "userdata": ptypeover = "Pointer" # fixme argname = "userdata" if c == "tvarargs": ptypeover = "const TVarrecArray" argname = "avarargs" if c == "varargs": ptypeover = "array of const" argname = "avarargs" if c == "array": ptypeover = "T%sArray" % it if c == "carray": ptypeover = "T%sArray" % it if c == "varcarray": ptypeover = "var T%sArray" % it if c == "const": ptype = c2ptype(argtyp, False, canvar) if ptype.startswith("var "): ptypeover = "const " + ptype[4:] else: print classname, "warning", fn["name"], "parameter is overridden const but was not originally var" if c == "out": ptype = c2ptype(argtyp, False, canvar) if ptype.startswith("var "): ptypeover = "out " + ptype[4:] else: print classname, "warning", fn["name"], "parameter is overridden const but was not originally var" if ptypeover != None: ptype = ptypeover else: try: ptype = c2ptype(argtyp, False, canvar) except: print classname, "info: while doing", fn raise pname = argname if ptype == None: print classname, "info: while doing", fn # TODO var if ptype == "TGTypeArray": ptype = "ugtypes.TGTypeArray" # weird workaround #elif ptype == "TGIntArray": # pargtyp = "ugtype.TGIntArray", # weird workaround if ptype != None and ptype.startswith("var "): ptype = ptype[4:] if ptype == "": a.append("var %s" % pname) else: a.append("var %s: %s" % (pname, ptype)) elif ptype != None and ptype.startswith("const "): ptype = ptype[6:] a.append("const %s: %s" % (pname, ptype)) elif ptype != None and ptype.startswith("out "): ptype = ptype[4:] a.append("out %s: %s" % (pname, ptype)) else: a.append("%s: %s" % (pname, ptype)) if isDebug() and ptypeover != None: print "overridden", a if adefault != None: a[-1] = a[-1] + " = " + adefault ix = ix + 1 offs = - ixo for item in invalidlist: ix = item - offs try: a = a[:ix] + a[ix+1:] except: print "??!" pass offs = offs + 1 return ";".join(a) if varname == "cfunctioncallparams": a = getCfunctioncallparams(fn) # TODO @ return ",".join(a) print varname return None
|
print classname, "warning", fn["name"], "parameter is overridden const but was not originally var"
|
print classname, "warning", fn["name"], "parameter is overridden 'out' but was not originally 'var'"
|
def func_cb(varname, fn): global classname global cclassconstructparams global c2pfuncparamoverride global pextratypes global c2pcallbackpointers global preturntransformers global poverridearraytypes global wrapperpointerprefix global wrapperprefix global pstringtype global interfaceprefix if fn["name"] in c2pfuncparamoverride: overridep = c2pfuncparamoverride[fn["name"]] else: overridep = None if varname == "poverrideornot": if fn["name"].endswith("_sink"): return "override;" elif fn["name"] == "gtk_object_destroy": return "virtual;" return "" if varname == "pclassifier": if isMemberFN(fn) or isInterfaceImplementationFN(fn): return "" else: if isInInterfaceMode(): return "//class" else: return "class" if varname == "preturntransformer": ret = func_cb("preturn", fn) if ret in preturntransformers: return preturntransformers[ret] if ret == "Single": return "" # workaround, fixme Lowlevel return ret if varname == "preturn": if fn["return"] == "gchar**": preturn = arrayTypeOf(pstringtype) else: preturn = c2ptype(fn["return"]) if preturn != None: if preturn.startswith("var "): preturn = preturn[4:] if isFNListReturnOverride(fn): preturn = getFNListReturnOverride(fn)["preturn"] # support ICloneable interface by modding the return type to ICloneable # function Clone: ICloneable; ## TODO parent supports ICloneable ? if fn["name"] == ("%s_copy" % uscclassname): assert(len(fn["args"]) == 1) rover = getFNListReturnOverride(fn) if rover != None and "kind" in rover and rover["kind"] == "interface": preturn = "ICloneable" else: print classname, "warning", fn["name"], "missing interface return override" return preturn if varname == "preturnerrorcode": if isFNListReturnOverride(fn): return getFNListReturnOverride(fn)["errorcode"] else: print classname, "warning", "no known error code for", fn["name"], "; Hint: use 'errorcode' return override" return "?" if varname == "preturnitemrawtype": if not isFNListReturnOverride(fn): print classname, "error", "no list return overide for", fn["name"], "(preturnitemrawtype)" arritemt = getFNListReturnOverride(fn)["arritemt"] if arritemt.startswith(interfaceprefix): nativetype = wrapperpointerprefix + arritemt[len(interfaceprefix):] elif arritemt == "TGtkAccelGroupEntry": # var ? nativetype = wrapperpointerprefix + arritemt[1:] # cough. #print "nativetype", nativetype elif arritemt == pstringtype: #"UTF8String": # or so try: nativetype = getFNListReturnOverride(fn)["arritemraw"] except: nativetype = "PChar" print classname, "warning, no nativetype for return value of function ", fn["name"], "assuming PChar" #"PChar" if nativetype == "?": print classname, "error", fn["name"], "have", nativetype, "for", arritemt else: print classname, "error, cannot find nativetype for ", arritemt, "for function", fn["name"] #print "native", nativetype return nativetype if varname == "wrapperexpectedtype": arritemt = getFNListReturnOverride(fn)["arritemt"] if arritemt.startswith(interfaceprefix): return wrapperpointerprefix + "GObject" # FIXME others if classname == "GtkAccelGroup" and arritemt == "TGtkAccelGroupEntry": #print arritemt #== "GtkAccelGroupEntry*" and #print "YES" return "gtkAccelGroupEntryFromPointer" #if classname == "GtkAccelGroup": # FIXME do that somewhere else (*fromcarray) # if arritemt.startswith(classprefix): # return wrapperpointerprefix + arritemt[len(classprefix):] return "" if varname == "preturnitemtype": if (c2pfnname(fn["name"]) == "Clone"): arritemt = interfaceprefix + "Cloneable" else: arritemt = getFNListReturnOverride(fn)["arritemt"] if isInterface(arritemt): needInterface(arritemt) needImplementationUnitForClass(implementationClassForInterface(arritemt)) return arritemt if varname == "preturnitemtypeas": res = func_cb("preturnitemtype", fn) if isInterface(res) or isImplementationClass(res): return " as %s" % res else: return "" if varname == "preturnlistfinalizer": return getFNListReturnOverride(fn)["pendfree"] if varname == "preturnperitempreaction": try: return getFNListReturnOverride(fn)["pforeach"] except: print classname, "error", fn["name"], "pforeach not found in config" raise return pforeach if varname == "pfreeforeachitem": return getFNListReturnOverride(fn)["pforeachfree"] if varname == "preturnitemclassname": return getFNListReturnOverride(fn)["pclassconstruct"] if varname == "pfunction": return c2pfnname(fn["name"]) if varname == "cfunction": return fn["name"] if varname == "pfunctionparams": a = [] args = fn["args"] invalidlist = getInvalidArgs(fn, args) ixo = 0 if isMemberFN(fn) or isInterfaceImplementationFN(fn): #if len(args) > 0 and args[0][0].lower() == classname.lower() + "*": args = args[1:] # skip instance param ixo = -1 if len(args) > 0 and args[-1][0] == "GError**": args = args[:-1] #print args[0][0] ix = 1-ixo for argtyp, argname in args: canvar = False fnover = getFNListParamOverride(fn, ix) adefault = getFNListParamDefault(fn, ix) ptypeover = None if fnover != None and len(fnover) > 0: c = fnover[0] try: t = fnover[1] it = c2ptype(t) #if isEnum(it): if it.startswith("T"): # FIXME! after conv it = it[1:] except: t = None it = None if c == "type": ptypeover = t if c == "varany": ptypeover = "var " if c == "forceinstring": # for "gchar*" instead of "const gchar*" bug (gtk_paint_shadow_gap) ptypeover = pstringtype if c == "pointer": #print "pointer", fn ptypeover = "Pointer" if c == "ccallback": # TODO pass if c == "userdata": ptypeover = "Pointer" # fixme argname = "userdata" if c == "tvarargs": ptypeover = "const TVarrecArray" argname = "avarargs" if c == "varargs": ptypeover = "array of const" argname = "avarargs" if c == "array": ptypeover = "T%sArray" % it if c == "carray": ptypeover = "T%sArray" % it if c == "varcarray": ptypeover = "var T%sArray" % it if c == "const": ptype = c2ptype(argtyp, False, canvar) if ptype.startswith("var "): ptypeover = "const " + ptype[4:] else: print classname, "warning", fn["name"], "parameter is overridden const but was not originally var" if c == "out": ptype = c2ptype(argtyp, False, canvar) if ptype.startswith("var "): ptypeover = "out " + ptype[4:] else: print classname, "warning", fn["name"], "parameter is overridden const but was not originally var" if ptypeover != None: ptype = ptypeover else: try: ptype = c2ptype(argtyp, False, canvar) except: print classname, "info: while doing", fn raise pname = argname if ptype == None: print classname, "info: while doing", fn # TODO var if ptype == "TGTypeArray": ptype = "ugtypes.TGTypeArray" # weird workaround #elif ptype == "TGIntArray": # pargtyp = "ugtype.TGIntArray", # weird workaround if ptype != None and ptype.startswith("var "): ptype = ptype[4:] if ptype == "": a.append("var %s" % pname) else: a.append("var %s: %s" % (pname, ptype)) elif ptype != None and ptype.startswith("const "): ptype = ptype[6:] a.append("const %s: %s" % (pname, ptype)) elif ptype != None and ptype.startswith("out "): ptype = ptype[4:] a.append("out %s: %s" % (pname, ptype)) else: a.append("%s: %s" % (pname, ptype)) if isDebug() and ptypeover != None: print "overridden", a if adefault != None: a[-1] = a[-1] + " = " + adefault ix = ix + 1 offs = - ixo for item in invalidlist: ix = item - offs try: a = a[:ix] + a[ix+1:] except: print "??!" pass offs = offs + 1 return ";".join(a) if varname == "cfunctioncallparams": a = getCfunctioncallparams(fn) # TODO @ return ",".join(a) print varname return None
|
print "NO", rest
|
def fnsFromFNLines(fnlines): fns = {} rest = "" for fnline in fnlines: rest = rest + fnline.replace("\n", "") if fnline.find(";") > -1: #fns.append(rest) if rest.find("(*Gtk") > -1: # probably a function pointer type (I hope only them) rest = "" continue if rest.find("(*Gdk") > -1: # probably a function pointer type (I hope only them) rest = "" continue if endofstruct.match(rest.strip()) != None: rest = "" continue name, attr = parseFN(rest) if name == None and attr == None: print "NO", rest rest = "" continue if name == "GdkFilterReturn": # FIXME fix the underlying bug rest = "" continue fns[name] = attr rest = "" return fns
|
|
line = line.replace(")</programlisting>", ");")
|
def stripTags(line): match = re.search(r"^(.*)\)[ ]*:[^<]*</programlisting>.*$", line) if match != None: line = match.group(1) + ");" #line = line.replace(")</programlisting>", ");") # gtk 2.8 forgot the ";" :) while True: match = indexterm1.match(line) if match == None: break line = match.group(1) + match.group(3) while True: match = tags.match(line) if match == None: break line = match.group(1) + match.group(3) return line
|
|
print sys.path
|
def mergeArrays(a1, a2): a3 = [] for item in a1: a3.append(item) for item in a2: a3.append(item) return a3
|
|
return _prelude.idmef_time_get_sec(self.res)
|
return int(_prelude.idmef_time_get_sec(self.res))
|
def __int__(self): """Return the number of seconds.""" return _prelude.idmef_time_get_sec(self.res)
|
if type(py_value) is not str: raise IDMEFValueError(py_value, "expected %s, got %s" % (str, type(py_value)))
|
def _idmef_value_python_to_c(object, py_value): object_type = _prelude.idmef_object_get_value_type(object) if object_type is _prelude.IDMEF_VALUE_TYPE_TIME: time = _idmef_value_time_python_to_c(py_value) c_value = _prelude.idmef_value_new_time(time) if not c_value: raise Error() elif object_type in [ _prelude.IDMEF_VALUE_TYPE_INT16, _prelude.IDMEF_VALUE_TYPE_UINT16, _prelude.IDMEF_VALUE_TYPE_INT32, _prelude.IDMEF_VALUE_TYPE_UINT32, _prelude.IDMEF_VALUE_TYPE_INT64, _prelude.IDMEF_VALUE_TYPE_UINT64, _prelude.IDMEF_VALUE_TYPE_FLOAT, _prelude.IDMEF_VALUE_TYPE_DOUBLE ]: c_value = _idmef_integer_python_to_c(object, py_value) elif object_type is _prelude.IDMEF_VALUE_TYPE_ENUM: c_value = _prelude.idmef_value_new_enum_string(_prelude.idmef_object_get_type(object), py_value) elif object_type is _prelude.IDMEF_VALUE_TYPE_STRING: if type(py_value) is not str: raise IDMEFValueError(py_value, "expected %s, got %s" % (str, type(py_value))) c_string = _prelude.prelude_string_new_dup(py_value) if not c_string: raise Error() c_value = _prelude.idmef_value_new_string(c_string) elif object_type is _prelude.IDMEF_VALUE_TYPE_DATA: if type(py_value) is not str: raise IDMEFValueError(py_value, "expected %s, got %s" % (str, type(py_value))) if type(py_value) is str: c_data = _prelude.idmef_data_new_char_string_dup(py_value) elif type(py_value) is int: c_data = _prelude.idmef_data_new_uint32(py_value) elif type(py_value) is long: c_data = _prelude.idmef_data_new_uint64(py_value) else: raise IDMEFValueError(py_value, "type %s is not handled by idmef_data" % type(py_value)) if not c_data: raise Error() c_value = _prelude.idmef_value_new_data(c_data) else: # internal type not recognized/supported raise Error() if not c_value: raise Error() return c_value
|
|
return (lambda d: None, _prelude.idmef_data_get_char, _prelude.idmef_data_get_byte, _prelude.idmef_data_get_uint32, _prelude.idmef_data_get_uint64, _prelude.idmef_data_get_float, _prelude.idmef_data_get_char_string, _prelude.idmef_data_get_byte_string)[_prelude.idmef_data_get_type(data)](data)
|
value = (lambda d: None, _prelude.idmef_data_get_char, _prelude.idmef_data_get_byte, _prelude.idmef_data_get_uint32, _prelude.idmef_data_get_uint64, _prelude.idmef_data_get_float, _prelude.idmef_data_get_char_string, _prelude.idmef_data_get_byte_string)[_prelude.idmef_data_get_type(data)](data) return value
|
def idmef_value_c_to_python(value): func_type_table = { _prelude.IDMEF_VALUE_TYPE_INT8: _prelude.idmef_value_get_int8, _prelude.IDMEF_VALUE_TYPE_UINT8: _prelude.idmef_value_get_uint8, _prelude.IDMEF_VALUE_TYPE_INT16: _prelude.idmef_value_get_int16, _prelude.IDMEF_VALUE_TYPE_UINT16: _prelude.idmef_value_get_uint16, _prelude.IDMEF_VALUE_TYPE_INT32: _prelude.idmef_value_get_int32, _prelude.IDMEF_VALUE_TYPE_UINT32: _prelude.idmef_value_get_uint32, _prelude.IDMEF_VALUE_TYPE_INT64: _prelude.idmef_value_get_int64, _prelude.IDMEF_VALUE_TYPE_UINT64: _prelude.idmef_value_get_uint64, _prelude.IDMEF_VALUE_TYPE_FLOAT: _prelude.idmef_value_get_float, _prelude.IDMEF_VALUE_TYPE_DOUBLE: _prelude.idmef_value_get_double, } type = _prelude.idmef_value_get_type(value) if type == _prelude.IDMEF_VALUE_TYPE_TIME: time = _prelude.idmef_value_get_time(value) if not time: return None py_time = IDMEFTime() py_time.res = _prelude.idmef_time_clone(time) return py_time if type == _prelude.IDMEF_VALUE_TYPE_STRING: string = _prelude.idmef_value_get_string(value) if not string: return None return _prelude.prelude_string_get_string(string) if type == _prelude.IDMEF_VALUE_TYPE_DATA: data = _prelude.idmef_value_get_data(value) if not data: return None return (lambda d: None, _prelude.idmef_data_get_char, _prelude.idmef_data_get_byte, _prelude.idmef_data_get_uint32, _prelude.idmef_data_get_uint64, _prelude.idmef_data_get_float, _prelude.idmef_data_get_char_string, _prelude.idmef_data_get_byte_string)[_prelude.idmef_data_get_type(data)](data) if type == _prelude.IDMEF_VALUE_TYPE_ENUM: return _prelude.idmef_type_enum_to_string(_prelude.idmef_value_get_object_type(value), _prelude.idmef_value_get_enum(value)) try: func = func_type_table[type] except KeyError: raise Error() return func(value)
|
m = re.compile("(\w+)\[(\w+)\]").match(self.name)
|
m = re.compile("(.+)\[(.+)\]").match(self.name)
|
def __init__(self, parent, option): self.parent = parent self.name = _prelude.prelude_option_get_longopt(option) self.instantiable = bool(_prelude.prelude_option_get_flags(option) & _prelude.ALLOW_MULTIPLE_CALL) if self.name.find("[") != -1: self.instance = True m = re.compile("(\w+)\[(\w+)\]").match(self.name) self.instantiable_name = m.group(1) self.instance_name = m.group(2) else: self.instance = False self.boolean = _prelude.prelude_option_get_has_arg(option) == _prelude.no_argument self.value = _prelude.prelude_option_get_value(option) self.description = _prelude.prelude_option_get_description(option) self.options = [ ] nodes = [ self.name ] while parent: nodes.insert(0, parent.name) parent = parent.parent self.path = ".".join(nodes)
|
_prelude.idmef_value_destroy(c_value)
|
if c_value != None: _prelude.idmef_value_destroy(c_value)
|
def __getitem__(self, object_name): """Get the value of the object in the message.""" object = _prelude.idmef_object_new_fast(object_name) if not object: raise IDMEFObjectError(object_name) c_value = _prelude.idmef_object_get(self.res, object) _prelude.idmef_object_destroy(object)
|
self.instantiable = bool(_prelude.prelude_option_get_flags(option) & _prelude.ALLOW_MULTIPLE_CALL)
|
self.instantiable = bool(_prelude.prelude_option_get_flags(option) & _prelude.HAVE_CONTEXT)
|
def __init__(self, parent, option): self.parent = parent self.name = _prelude.prelude_option_get_longopt(option) self.instantiable = bool(_prelude.prelude_option_get_flags(option) & _prelude.ALLOW_MULTIPLE_CALL) if self.name.find("[") != -1: self.instance = True m = re.compile("(.+)\[(.+)\]").match(self.name) self.instantiable_name = m.group(1) self.instance_name = m.group(2) else: self.instance = False self.boolean = _prelude.prelude_option_get_has_arg(option) == _prelude.no_argument self.value = _prelude.prelude_option_get_value(option) self.description = _prelude.prelude_option_get_description(option) self.options = [ ] nodes = [ self.name ] while parent: nodes.insert(0, parent.name) parent = parent.parent self.path = ".".join(nodes)
|
if _prelude.prelude_client_init(self._client, name, config, len(sys.argv), sys.argv) < 0:
|
if _prelude.prelude_client_init(self._client, name, config, 1, [ sys.argv[0] ]) < 0:
|
def __init__(self, name, config=None): if not name: name = sys.argv[0] if not config: file = os.popen("libprelude-config --prefix") path = file.read() file.close() config = path[:-1] + "/etc/prelude/default/client.conf" self._client = _prelude.prelude_client_new(_prelude.PRELUDE_CLIENT_CAPABILITY_SEND_IDMEF) if not self._client: raise SensorError()
|
def __init__(self, parent, name, description, value):
|
def __init__(self, parent, option):
|
def __init__(self, parent, name, description, value): self.parent = parent self.name = name self.description = description self.value = value self.options = [ ] nodes = [ name ] while parent: nodes.insert(0, parent.name) parent = parent.parent self.path = ".".join(nodes)
|
self.name = name self.description = description self.value = value
|
self.name = _prelude.prelude_option_get_longopt(option) self.instantiable = bool(_prelude.prelude_option_get_flags(option) & _prelude.ALLOW_MULTIPLE_CALL) if self.name.find("[") != -1: self.instance = True m = re.compile("(\w+)\[(\w+)\]").match(self.name) self.instantiable_name = m.group(1) self.instance_name = m.group(2) else: self.instance = False self.boolean = _prelude.prelude_option_get_has_arg(option) == _prelude.no_argument self.value = _prelude.prelude_option_get_value(option) self.description = _prelude.prelude_option_get_description(option)
|
def __init__(self, parent, name, description, value): self.parent = parent self.name = name self.description = description self.value = value self.options = [ ] nodes = [ name ] while parent: nodes.insert(0, parent.name) parent = parent.parent self.path = ".".join(nodes)
|
nodes = [ name ]
|
nodes = [ self.name ]
|
def __init__(self, parent, name, description, value): self.parent = parent self.name = name self.description = description self.value = value self.options = [ ] nodes = [ name ] while parent: nodes.insert(0, parent.name) parent = parent.parent self.path = ".".join(nodes)
|
name = _prelude.prelude_option_get_longopt(cur) value = _prelude.prelude_option_get_value(cur) description = _prelude.prelude_option_get_description(cur) option = Option(parent, name, description, value)
|
option = Option(parent, cur)
|
def _get_option_list(self, parent, start): options = [ ] cur = None while True: cur = _prelude.prelude_option_get_next(start, cur) if not cur: break name = _prelude.prelude_option_get_longopt(cur) value = _prelude.prelude_option_get_value(cur) description = _prelude.prelude_option_get_description(cur) option = Option(parent, name, description, value) options.append(option) if _prelude.prelude_option_has_optlist(cur): option.options = self._get_option_list(option, cur) return options
|
def set_option(self, analyzerid, name, value): msg = self._request(analyzerid, _prelude.PRELUDE_MSG_OPTION_SET, "%s=%s" % (name, value))
|
def set_option(self, analyzerid, name, value=None): if value: value = "%s=%s" % (name, value) else: value = name msg = self._request(analyzerid, _prelude.PRELUDE_MSG_OPTION_SET, value)
|
def set_option(self, analyzerid, name, value): msg = self._request(analyzerid, _prelude.PRELUDE_MSG_OPTION_SET, "%s=%s" % (name, value)) retval = _prelude.prelude_option_recv_set(msg) return retval
|
if len < 0:
|
if size < 0:
|
def __str__(self): """Return the RFC8601 string representation of the object.""" buf = "A" * 128
|
if _prelude.prelude_option_recv_set(msg) < 0:
|
if not _prelude.prelude_option_recv_set(msg):
|
def set_option(self, analyzer_path, name, value=None): if value: value = "%s=%s" % (name, value) else: value = name msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_SET, value) if _prelude.prelude_option_recv_set(msg) < 0: raise Error("could not read set_option answer")
|
if _prelude.prelude_option_recv_set(msg) < 0:
|
if not _prelude.prelude_option_recv_set(msg):
|
def commit(self, analyzer_path, instance): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_COMMIT, instance) if _prelude.prelude_option_recv_set(msg) < 0: raise Error("could not read commit answer")
|
if _prelude.prelude_option_recv_set(msg) < 0:
|
if not _prelude.prelude_option_recv_set(msg):
|
def destroy(self, analyzer_path, instance): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_DESTROY, instance)
|
RECV_IDMEF = _prelude.PRELUDE_CLIENT_CAPABILITY_RECV_IDMEF SEND_IDMEF = _prelude.PRELUDE_CLIENT_CAPABILITY_SEND_IDMEF RECV_ADMIN = _prelude.PRELUDE_CLIENT_CAPABILITY_RECV_ADMIN SEND_ADMIN = _prelude.PRELUDE_CLIENT_CAPABILITY_SEND_ADMIN RECV_CM = _prelude.PRELUDE_CLIENT_CAPABILITY_RECV_CM SEND_CM = _prelude.PRELUDE_CLIENT_CAPABILITY_SEND_CM
|
def get_libprelude_prefix(): return os.popen("libprelude-config --prefix").read()[:-1]
|
|
class Admin(Client):
|
class Admin:
|
def is_section(self): return len(self.options) > 0
|
Client.__init__(self, Client.SEND_ADMIN, name, get_libprelude_prefix() + "/etc/prelude/default/client.conf") self._manager_connection = _prelude.prelude_connection_new(self._client, address, port) if not self._manager_connection: raise ClientError("could not create new connection to %s:%p" % (address, port)) if _prelude.prelude_client_set_connection(self._client, self._manager_connection) < 0: raise ClientError("could not set client connection") if _prelude.prelude_connection_connect(self._manager_connection) < 0: raise ClientError("could not connect to manager")
|
self._profile = _prelude.prelude_client_profile_new(name or sys.argv[0]) if not self._profile: raise ClientError("could not create client profile for %s" % name or sys.argv[0]) self._connection = _prelude.prelude_connection_new(address, port) if not self._connection: raise ClientError("could not create connection to %s:%d" % (address, port)) self._msgbuf = _prelude.prelude_connection_new_msgbuf(self._connection) if not self._msgbuf: raise ClientError("could not create msgbuf for connection") if _prelude.prelude_connection_connect(self._connection, self._profile, _prelude.PRELUDE_CONNECTION_CAPABILITY_CONNECT) < 0: raise ClientError("could not connect to %s:%d" % (address, port))
|
def __init__(self, name=None, address="127.0.0.1", port=5554): Client.__init__(self, Client.SEND_ADMIN, name, get_libprelude_prefix() + "/etc/prelude/default/client.conf") self._manager_connection = _prelude.prelude_connection_new(self._client, address, port) if not self._manager_connection: raise ClientError("could not create new connection to %s:%p" % (address, port)) if _prelude.prelude_client_set_connection(self._client, self._manager_connection) < 0: raise ClientError("could not set client connection") if _prelude.prelude_connection_connect(self._manager_connection) < 0: raise ClientError("could not connect to manager")
|
_prelude.prelude_option_new_request(self._client, self._msgbuf, 0, analyzer_path)
|
_prelude.prelude_option_new_request(self._msgbuf, 0, analyzer_path)
|
def _request(self, analyzer_path, type, value=None): _prelude.prelude_option_new_request(self._client, self._msgbuf, 0, analyzer_path) _prelude.prelude_option_push_request(self._msgbuf, type, value) _prelude.prelude_msgbuf_mark_end(self._msgbuf) msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._manager_connection)) return msg
|
msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._manager_connection))
|
msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._connection))
|
def _request(self, analyzer_path, type, value=None): _prelude.prelude_option_new_request(self._client, self._msgbuf, 0, analyzer_path) _prelude.prelude_option_push_request(self._msgbuf, type, value) _prelude.prelude_msgbuf_mark_end(self._msgbuf) msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._manager_connection)) return msg
|
c_data = _prelude.idmef_data_new_dup(py_value, len(py_value) + 1)
|
if type(py_value) is str: c_data = _prelude.idmef_data_new_char_string_dup(py_value) elif type(py_value) is int: c_data = _prelude.idmef_data_new_uint32(py_value) elif type(py_value) is long: c_data = _prelude.idmef_data_new_uint64(py_value) else: raise IDMEFValueError(py_value, "type %s is not handled by idmef_data" % type(py_value))
|
def _idmef_value_python_to_c(object, py_value): object_type = _prelude.idmef_object_get_value_type(object) if object_type is _prelude.IDMEF_VALUE_TYPE_TIME: time = _idmef_value_time_python_to_c(py_value) c_value = _prelude.idmef_value_new_time(time) if not c_value: raise Error() elif object_type in [ _prelude.IDMEF_VALUE_TYPE_INT16, _prelude.IDMEF_VALUE_TYPE_UINT16, _prelude.IDMEF_VALUE_TYPE_INT32, _prelude.IDMEF_VALUE_TYPE_UINT32, _prelude.IDMEF_VALUE_TYPE_INT64, _prelude.IDMEF_VALUE_TYPE_UINT64, _prelude.IDMEF_VALUE_TYPE_FLOAT, _prelude.IDMEF_VALUE_TYPE_DOUBLE ]: c_value = _idmef_integer_python_to_c(object, py_value) elif object_type is _prelude.IDMEF_VALUE_TYPE_ENUM: c_value = _prelude.idmef_value_new_enum_string(_prelude.idmef_object_get_type(object), py_value) elif object_type is _prelude.IDMEF_VALUE_TYPE_STRING: if type(py_value) is not str: raise IDMEFValueError(py_value, "expected %s, got %s" % (str, type(py_value))) c_string = _prelude.prelude_string_new_dup(py_value) if not c_string: raise Error() c_value = _prelude.idmef_value_new_string(c_string) elif object_type is _prelude.IDMEF_VALUE_TYPE_DATA: if type(py_value) is not str: raise IDMEFValueError(py_value, "expected %s, got %s" % (str, type(py_value))) c_data = _prelude.idmef_data_new_dup(py_value, len(py_value) + 1) if not c_data: raise Error() c_value = _prelude.idmef_value_new_data(c_data) else: # internal type not recognized/supported raise Error() if not c_value: raise Error() return c_value
|
return _prelude.idmef_data_get_data(data)
|
return (lambda d: None, _prelude.idmef_data_get_char, _prelude.idmef_data_get_byte, _prelude.idmef_data_get_uint32, _prelude.idmef_data_get_uint64, _prelude.idmef_data_get_float, _prelude.idmef_data_get_char_string, _prelude.idmef_data_get_byte_string)[_prelude.idmef_data_get_type(data)](data)
|
def idmef_value_c_to_python(value): func_type_table = { _prelude.IDMEF_VALUE_TYPE_INT8: _prelude.idmef_value_get_int8, _prelude.IDMEF_VALUE_TYPE_UINT8: _prelude.idmef_value_get_uint8, _prelude.IDMEF_VALUE_TYPE_INT16: _prelude.idmef_value_get_int16, _prelude.IDMEF_VALUE_TYPE_UINT16: _prelude.idmef_value_get_uint16, _prelude.IDMEF_VALUE_TYPE_INT32: _prelude.idmef_value_get_int32, _prelude.IDMEF_VALUE_TYPE_UINT32: _prelude.idmef_value_get_uint32, _prelude.IDMEF_VALUE_TYPE_INT64: _prelude.idmef_value_get_int64, _prelude.IDMEF_VALUE_TYPE_UINT64: _prelude.idmef_value_get_uint64, _prelude.IDMEF_VALUE_TYPE_FLOAT: _prelude.idmef_value_get_float, _prelude.IDMEF_VALUE_TYPE_DOUBLE: _prelude.idmef_value_get_double, } type = _prelude.idmef_value_get_type(value) if type == _prelude.IDMEF_VALUE_TYPE_TIME: time = _prelude.idmef_value_get_time(value) if not time: return None py_time = IDMEFTime() py_time.res = _prelude.idmef_time_clone(time) return py_time if type == _prelude.IDMEF_VALUE_TYPE_STRING: string = _prelude.idmef_value_get_string(value) if not string: return None return _prelude.prelude_string_get_string(string) if type == _prelude.IDMEF_VALUE_TYPE_DATA: data = _prelude.idmef_value_get_data(value) if not data: return None return _prelude.idmef_data_get_data(data) if type == _prelude.IDMEF_VALUE_TYPE_ENUM: return _prelude.idmef_type_enum_to_string(_prelude.idmef_value_get_object_type(value), _prelude.idmef_value_get_enum(value)) try: func = func_type_table[type] except KeyError: raise Error() return func(value)
|
def _idmef_value_c_to_python(value):
|
def idmef_value_c_to_python(value):
|
def _idmef_value_c_to_python(value): func_type_table = { type_int16: idmef_value_get_int16, type_uint16: idmef_value_get_uint16, type_int32: idmef_value_get_int32, type_uint32: idmef_value_get_uint32, type_int64: idmef_value_get_int64, type_uint64: idmef_value_get_uint64, type_float: idmef_value_get_float, type_double: idmef_value_get_double, type_enum: idmef_value_get_enum, } type = idmef_value_get_type(value) if type == type_time: time = idmef_value_get_time(value) if not time: return None return IDMEFTime([idmef_time_get_sec(time), idmef_time_get_usec(time)]) if type == type_string: string = idmef_value_get_string(value) if not string: return None return idmef_string_get_string(string) if type == type_data: data = idmef_value_get_data(value) if not data: return None return idmef_data_get_data(data) try: func = func_type_table[type] except KeyError: raise Error() return func(value)
|
return _idmef_value_c_to_python(value)
|
return idmef_value_c_to_python(value)
|
def _idmef_value_list_c_to_python(value): if value is None: return None if not idmef_value_is_list(value): return _idmef_value_c_to_python(value) ret = [ ] for i in range(idmef_value_get_count(value)): ret.append(_idmef_value_list_c_to_python(idmef_value_get_nth(value, i))) return ret
|
def __init__(self): """Create a new empty IDMEF message.""" self.res = idmef_message_new() if not self.res: raise Error()
|
def __init__(self, res=None): """Create a new empty IDMEF message.""" if res: self.res = res else: self.res = idmef_message_new() if not self.res: raise Error()
|
def __init__(self): """Create a new empty IDMEF message.""" self.res = idmef_message_new() if not self.res: raise Error()
|
def __append(self, value, operator): top_criteria = IDMEFCriteria() idmef_criteria_destroy(top_criteria.res) top_criteria.res = idmef_criteria_clone(self.res) if not top_criteria.res: raise Error() if type(value) is IDMEFCriteria: criteria2 = idmef_criteria_clone(value.res) if not criteria2: raise Error() else: criteria2 = idmef_criteria_new_string(value) if not criteria2: raise IDMEFCriteriaError(value) if idmef_criteria_add_criteria(top_criteria.res, criteria2, operator) < 0: idmef_criteria_destroy(criteria2) raise Error() return top_criteria def __and__(self, value): return self.__append(value, operator_and) def __or__(self, value): return self.__append(value, operator_or)
|
def __append(self, new_sub_criteria, operator): new_criteria = IDMEFCriteria() idmef_criteria_destroy(new_criteria.res) new_criteria.res = idmef_criteria_clone(self.res) if not new_criteria.res: raise Error() new_sub_criteria_res = idmef_criteria_clone(new_sub_criteria.res) if not new_sub_criteria_res: raise Error() if idmef_criteria_add_criteria(new_criteria.res, new_sub_criteria_res, operator) < 0: idmef_criteria_destroy(new_sub_criteria_res) raise Error() return new_criteria def __and__(self, new_sub_criteria): return self.__append(new_sub_criteria, operator_and) def __or__(self, new_sub_criteria): return self.__append(new_sub_criteria, operator_or)
|
def __append(self, value, operator): top_criteria = IDMEFCriteria() idmef_criteria_destroy(top_criteria.res)
|
def __init__(self, name, config):
|
def __init__(self, name, config=None):
|
def __init__(self, name, config): if not config: file = os.popen("libprelude-config --prefix") path = file.read() file.close() config = path[:-1] + "/etc/prelude/default/client.conf" Client.__init__(self, Client.SEND_IDMEF, name, config) self._analyzer = _prelude.prelude_client_get_analyzer(self._client) if not self._analyzer: _prelude.prelude_client_destroy(self._client) raise ClientError()
|
_prelude.type_IDMEF_VALUE_TYPE_UINT16: { 'py_type': [ int ],
|
_prelude.IDMEF_VALUE_TYPE_UINT16: { 'py_type': [ int ],
|
def _idmef_integer_python_to_c(object, py_value): value_type_table = { _prelude.IDMEF_VALUE_TYPE_INT16: { 'py_type': [ int ], 'check_value': lambda i: i >= -2 ** 15 and i < 2 ** 15, 'convert': _prelude.idmef_value_new_int16 }, _prelude.type_IDMEF_VALUE_TYPE_UINT16: { 'py_type': [ int ], 'check_value': lambda i: i >= 0 and i < 2 ** 16, 'convert': _prelude.idmef_value_new_uint16 }, _prelude.IDMEF_VALUE_TYPE_INT32: { 'py_type': [ int ], 'check': lambda i: i >= -2 ** 31 and i < 2 ** 31, 'convert': _prelude.idmef_value_new_int32 }, _prelude.IDMEF_VALUE_TYPE_UINT32: { 'py_type': [ int ], 'check_value': lambda i: i >= 0 and i < 2 ** 32, 'convert': _prelude.idmef_value_new_uint32 }, _prelude.IDMEF_VALUE_TYPE_INT64: { 'py_type': [ long, int ], 'check_value': lambda i: i >= -2 ** 63 and i < 2 ** 63, 'convert': _prelude.idmef_value_new_int64 }, _prelude.IDMEF_VALUE_TYPE_UINT64: { 'py_type': [ long, int ], 'check_value': lambda i: i >= 0 and i < 2 ** 64, 'convert': _prelude.idmef_value_new_uint64 }, _prelude.IDMEF_VALUE_TYPE_FLOAT: { 'py_type': [ float ], 'convert': _prelude.idmef_value_new_float }, _prelude.IDMEF_VALUE_TYPE_DOUBLE: { 'py_type': [ float ], 'convert': _prelude.idmef_value_new_double }, } object_type = _prelude.idmef_object_get_value_type(object) if type(py_value) not in value_type_table[object_type]['py_type']: raise IDMEFValueError(py_value, "expected %s, got %s" % (value_type_table[object_type]['py_type'][0], type(py_value))) if value_type_table[object_type].has_key('check_value'): if not value_type_table[object_type]['check_value'](py_value): raise IDMEFValueError(py_value, "out of range") return value_type_table[object_type]['convert'](py_value)
|
if _prelude.prelude_connection_connect(self._connection, self._profile, _prelude.PRELUDE_CONNECTION_CAPABILITY_CONNECT) < 0: raise ClientError("could not connect to %s:%d" % (address, port))
|
self.connect()
|
def __init__(self, name=None, address="127.0.0.1", port=5554): self._profile = _prelude.prelude_client_profile_new(name or sys.argv[0]) if not self._profile: raise ClientError("could not create client profile for %s" % name or sys.argv[0]) self._connection = _prelude.prelude_connection_new(address, port) if not self._connection: raise ClientError("could not create connection to %s:%d" % (address, port))
|
_prelude.prelude_option_new_request(self._msgbuf, 0, analyzer_path) _prelude.prelude_option_push_request(self._msgbuf, type, value)
|
ret = _prelude.prelude_option_new_request(self._msgbuf, 0, analyzer_path) if ret < 0: raise ClientError(ret) ret = _prelude.prelude_option_push_request(self._msgbuf, type, value) if ret < 0: raise ClientError(ret)
|
def _request(self, analyzer_path, type, value=None): _prelude.prelude_option_new_request(self._msgbuf, 0, analyzer_path) _prelude.prelude_option_push_request(self._msgbuf, type, value) _prelude.prelude_msgbuf_mark_end(self._msgbuf) msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._connection)) return msg
|
msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._connection))
|
msg = _prelude.prelude_connection_recv(self._connection) if not msg: raise Error("could not read request answer")
|
def _request(self, analyzer_path, type, value=None): _prelude.prelude_option_new_request(self._msgbuf, 0, analyzer_path) _prelude.prelude_option_push_request(self._msgbuf, type, value) _prelude.prelude_msgbuf_mark_end(self._msgbuf) msg = _prelude.my_prelude_msg_read(_prelude.prelude_connection_get_fd(self._connection)) return msg
|
if not msg: raise Error("PRELUDE_MSG_OPTION_LIST failed")
|
def get_option_list(self, analyzer_path): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_LIST) if not msg: raise Error("PRELUDE_MSG_OPTION_LIST failed")
|
|
return
|
raise Error("could not retrieve option list")
|
def get_option_list(self, analyzer_path): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_LIST) if not msg: raise Error("PRELUDE_MSG_OPTION_LIST failed")
|
retval = _prelude.prelude_option_recv_set(msg) return retval
|
if _prelude.prelude_option_recv_set(msg) < 0: raise Error("could not read set_option answer")
|
def set_option(self, analyzer_path, name, value=None): if value: value = "%s=%s" % (name, value) else: value = name msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_SET, value) retval = _prelude.prelude_option_recv_set(msg) return retval
|
if not msg: return retval = _prelude.prelude_option_recv_set(msg) return retval
|
if _prelude.prelude_option_recv_set(msg) < 0: raise Error("could not read commit answer")
|
def commit(self, analyzer_path, instance): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_COMMIT, instance) if not msg: return retval = _prelude.prelude_option_recv_set(msg)
|
if not msg: return retval = _prelude.prelude_option_recv_set(msg) return retval
|
if _prelude.prelude_option_recv_set(msg) < 0: raise Error("could not read destroy answer")
|
def destroy(self, analyzer_path, instance): msg = self._request(analyzer_path, _prelude.PRELUDE_MSG_OPTION_DESTROY, instance) if not msg: return retval = _prelude.prelude_option_recv_set(msg)
|
self._exit_status = _prelude.PRELUDE_CLIENT_EXIST_STATUS_SUCCESS
|
self._exit_status = _prelude.PRELUDE_CLIENT_EXIT_STATUS_SUCCESS
|
def set_success(self): self._exit_status = _prelude.PRELUDE_CLIENT_EXIST_STATUS_SUCCESS
|
self._exit_status = _prelude.PRELUDE_CLIENT_EXIST_STATUS_FAILURE
|
self._exit_status = _prelude.PRELUDE_CLIENT_EXIT_STATUS_FAILURE
|
def set_failure(self): self._exit_status = _prelude.PRELUDE_CLIENT_EXIST_STATUS_FAILURE
|
_prelude.prelude_client_destroy(self._client)
|
_prelude.prelude_client_destroy(self._client, self._exit_status)
|
def __del__(self): #print "Client.__del__" if self._msgbuf: _prelude.prelude_msgbuf_close(self._msgbuf) #self._msgbuf = None
|
_prelude.prelude_client_destroy(self._client)
|
_prelude.prelude_client_destroy(self._client, _prelude.PRELUDE_CLIENT_EXIT_STATUS_FAILURE)
|
def __init__(self, name, config=None): if not config: file = os.popen("libprelude-config --prefix") path = file.read() file.close() config = path[:-1] + "/etc/prelude/default/client.conf" Client.__init__(self, Client.SEND_IDMEF, name, config) self._analyzer = _prelude.prelude_client_get_analyzer(self._client) if not self._analyzer: _prelude.prelude_client_destroy(self._client) raise ClientError()
|
_prelude.prelude_client_set_connection(self._client, self._manager_connection) _prelude.prelude_connection_connect(self._manager_connection)
|
if not self._manager_connection: raise ClientError("could not create new connection to %s:%p" % (address, port)) if _prelude.prelude_client_set_connection(self._client, self._manager_connection) < 0: raise ClientError("could not set client connection") if _prelude.prelude_connection_connect(self._manager_connection) < 0: raise ClientError("could not connect to manager")
|
def __init__(self, name=None, address="127.0.0.1", port=5554): Client.__init__(self, Client.SEND_ADMIN, name, None) self._manager_connection = _prelude.prelude_connection_new(self._client, address, port) _prelude.prelude_client_set_connection(self._client, self._manager_connection) _prelude.prelude_connection_connect(self._manager_connection)
|
def maintainence_price():
|
def maintainence_price(addMarkup = True):
|
def maintainence_price(): "Calculate the price of maintainence" price = (float(maintainenceCost[1]) / float(maintainenceCost[0])) / 1000.0 price = price * transfer if addMarkup: price = addPercent(price, markup) return price
|
price = (float(maintainenceCost[1]) / float(maintainenceCost[0])) / 1000.0 price = price * transfer
|
price = float(maintainenceCost[1]) / float(maintainenceCost[0]) price = price
|
def maintainence_price(): "Calculate the price of maintainence" price = (float(maintainenceCost[1]) / float(maintainenceCost[0])) / 1000.0 price = price * transfer if addMarkup: price = addPercent(price, markup) return price
|
def cmdITIME(self, source, name):
|
def cmdITIME(self, source):
|
def cmdITIME(self, source, name): """Synonym, of BEAT See: BEAT """
|
if tokens.peek().upper() == "LIST": tokens.next() self.doLIST(target) elif tokens.peek().upper() == "INFO": tokens.next() plugin = tokens.next() self.doINFO(target, plugin)
|
if tokens.more(): if tokens.peek().upper() == "LIST": tokens.next() self.doLIST(target) elif tokens.peek().upper() == "INFO": tokens.next() plugin = tokens.next() self.doINFO(target, plugin)
|
def onMESSAGE(self, source, target, message):
|
msg = 'Hostname: ' + socket.gethostbyaddr(host) else: msg = source[0] + ', Hostname: ' + socket.gethostbyaddr(host)
|
msg = "Hostname: %s" + name else: msg = "%s, Hostname: %s" % (source[0], name)
|
def doHOST(self, source, target, host): if re.search('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+', host): if source[0] == target: msg = 'Hostname: ' + socket.gethostbyaddr(host) else: msg = source[0] + ', Hostname: ' + socket.gethostbyaddr(host) else: if source[0] == target: msg = 'IP: ' + socket.gethostbyname(host) else: msg = source[0] + ', IP: ' + socket.gethostbyname(host) self.ircPRIVMSG(target, msg)
|
msg = 'IP: ' + socket.gethostbyname(host) else: msg = source[0] + ', IP: ' + socket.gethostbyname(host)
|
msg = "IP: %s" % name else: msg = "%s, IP: %s" % (source[0], name)
|
def doHOST(self, source, target, host): if re.search('[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+', host): if source[0] == target: msg = 'Hostname: ' + socket.gethostbyaddr(host) else: msg = source[0] + ', Hostname: ' + socket.gethostbyaddr(host) else: if source[0] == target: msg = 'IP: ' + socket.gethostbyname(host) else: msg = source[0] + ', IP: ' + socket.gethostbyname(host) self.ircPRIVMSG(target, msg)
|
win.show()
|
def sighndl(sig, stack): win.show()
|
|
ret=self.__parse_results(cursor,callback, sofar)
|
ret=self.__parse_results(cursor,callback, sofar=sofar)
|
def query(self, query, callback): """execute a query and call callback on all the results, in the correct order.
|
if not sofar or id not in sofar: if sofar:
|
if sofar==None or id not in sofar: if sofar!=None:
|
def __parse_results(self, cursor, callback, sofar=None): """Call callback on all the results, in the correct order.
|
assert (RPath(self.lc, self.mainprefix, ("sampledir",)).listdir() == ["1", "2", "3", "4"])
|
dirlist = RPath(self.lc, self.mainprefix, ("sampledir",)).listdir() dirlist.sort() assert dirlist == ["1", "2", "3", "4"], dirlist
|
def testListdir(self): """Checking dir listings""" assert (RPath(self.lc, self.mainprefix, ("sampledir",)).listdir() == ["1", "2", "3", "4"])
|
if attr == 'com.apple.FinderInfo' or attr == 'come.apple.ResourceFork':
|
if attr == 'com.apple.FinderInfo' or attr == 'com.apple.ResourceFork':
|
def read_from_rp(self, rp): """Set the extended attributes from an rpath""" try: attr_list = rp.conn.xattr.listxattr(rp.path) except IOError, exc: if exc[0] == errno.EOPNOTSUPP: return # if not sup, consider empty if exc[0] == errno.EACCES: log.Log("Warning: listattr(%s): %s" % (rp.path, exc), 3) return raise for attr in attr_list: if attr.startswith('system.'): # Do not preserve system extended attributes continue if attr == 'com.apple.FinderInfo' or attr == 'come.apple.ResourceFork': # FinderInfo and Resource Fork handled elsewhere continue try: self.attr_dict[attr] = rp.conn.xattr.getxattr(rp.path, attr) except IOError, exc: # File probably modified while reading, just continue if exc[0] == errno.ENODATA: continue elif exc[0] == errno.ENOENT: break else: raise
|
if diff_rorp.isspecial():
|
if rf.metadata_rorp.isspecial():
|
def fast_process(self, index, rf): """Process when nothing is a directory""" if not rf.metadata_rorp.equal_loose(rf.mirror_rp): log.Log("Regressing file %s" % (rf.metadata_rorp.get_indexpath()), 5) if rf.metadata_rorp.isreg(): self.restore_orig_regfile(rf) else: if rf.mirror_rp.lstat(): rf.mirror_rp.delete() if diff_rorp.isspecial(): robust.check_common_error(None, rpath.copy_with_attribs, (rf.metadata_rorp, rf.mirror_rp)) else: rpath.copy_with_attribs(rf.metadata_rorp, rf.mirror_rp) if rf.regress_inc: log.Log("Deleting increment " + rf.regress_inc.path, 5) rf.regress_inc.delete()
|
assert not ext_rp.lstat()
|
if ext_rp: assert not ext_rp.lstat()
|
def set_extended_filenames(self, subdir): """Set self.extended_filenames by trying to write a path""" assert not self.read_only
|
The quoting this session appears to need do not match those in
|
The quoting chars this session needs (%s) do not match the repository settings (%s) listed in
|
def compare_ctq_file(self, rbdir, suggested_ctq): """Compare ctq file with suggested result, return actual ctq""" ctq_rp = rbdir.append("chars_to_quote") if not ctq_rp.lstat(): if Globals.chars_to_quote is None: actual_ctq = suggested_ctq else: actual_ctq = Globals.chars_to_quote ctq_rp.write_string(actual_ctq) return actual_ctq
|
backed up onto it. If you want to risk it, remove the file rdiff-backup-data/chars_to_quote.""" % (ctq_rp.path,))
|
backed up onto it.""" % (suggested_ctq, actual_ctq, ctq_rp.path))
|
def compare_ctq_file(self, rbdir, suggested_ctq): """Compare ctq file with suggested result, return actual ctq""" ctq_rp = rbdir.append("chars_to_quote") if not ctq_rp.lstat(): if Globals.chars_to_quote is None: actual_ctq = suggested_ctq else: actual_ctq = Globals.chars_to_quote ctq_rp.write_string(actual_ctq) return actual_ctq
|
if len(session_times) < session_num:
|
if len(session_times) <= session_num:
|
def time_from_session(session_num, rp = None): """Return time in seconds of given backup The current mirror is session_num 0, the next oldest increment has number 1, etc. Requires that the Globals.rbdir directory be set. """ session_times = Globals.rbdir.conn.restore.MirrorStruct \ .get_increment_times() session_times.sort() if len(session_times) < session_num: return session_times[0] # Use oldest if two few backups return session_times[-session_num-1]
|
str_list.append(" Uname %s\n" % rorpath.getuname() or ":")
|
str_list.append(" Uname %s\n" % (rorpath.getuname() or ":"))
|
def RORP2Record(rorpath): """From RORPath, return text record of file's metadata""" str_list = ["File %s\n" % quote_path(rorpath.get_indexpath())] # Store file type, e.g. "dev", "reg", or "sym", and type-specific data type = rorpath.gettype() if type is None: type = "None" str_list.append(" Type %s\n" % type) if type == "reg": str_list.append(" Size %s\n" % rorpath.getsize()) # If there is a resource fork, save it. if rorpath.has_resource_fork(): if not rorpath.get_resource_fork(): rf = "None" else: rf = binascii.hexlify(rorpath.get_resource_fork()) str_list.append(" ResourceFork %s\n" % (rf,)) # If there is Carbon data, save it. if rorpath.has_carbonfile(): if not rorpath.get_carbonfile(): cfile = "None" else: cfile = carbonfile2string(rorpath.get_carbonfile()) str_list.append(" CarbonFile %s\n" % (cfile,)) # If file is hardlinked, add that information if Globals.preserve_hardlinks: numlinks = rorpath.getnumlinks() if numlinks > 1: str_list.append(" NumHardLinks %s\n" % numlinks) str_list.append(" Inode %s\n" % rorpath.getinode()) str_list.append(" DeviceLoc %s\n" % rorpath.getdevloc()) elif type == "None": return "".join(str_list) elif type == "dir" or type == "sock" or type == "fifo": pass elif type == "sym": str_list.append(" SymData %s\n" % quote_path(rorpath.readlink())) elif type == "dev": major, minor = rorpath.getdevnums() if rorpath.isblkdev(): devchar = "b" else: assert rorpath.ischardev() devchar = "c" str_list.append(" DeviceNum %s %s %s\n" % (devchar, major, minor)) # Store time information if type != 'sym' and type != 'dev': str_list.append(" ModTime %s\n" % rorpath.getmtime()) # Add user, group, and permission information uid, gid = rorpath.getuidgid() str_list.append(" Uid %s\n" % uid) str_list.append(" Uname %s\n" % rorpath.getuname() or ":") str_list.append(" Gid %s\n" % gid) str_list.append(" Gname %s\n" % rorpath.getgname() or ":") str_list.append(" Permissions %s\n" % rorpath.getperms()) return "".join(str_list)
|
str_list.append(" Gname %s\n" % rorpath.getgname() or ":")
|
str_list.append(" Gname %s\n" % (rorpath.getgname() or ":"))
|
def RORP2Record(rorpath): """From RORPath, return text record of file's metadata""" str_list = ["File %s\n" % quote_path(rorpath.get_indexpath())] # Store file type, e.g. "dev", "reg", or "sym", and type-specific data type = rorpath.gettype() if type is None: type = "None" str_list.append(" Type %s\n" % type) if type == "reg": str_list.append(" Size %s\n" % rorpath.getsize()) # If there is a resource fork, save it. if rorpath.has_resource_fork(): if not rorpath.get_resource_fork(): rf = "None" else: rf = binascii.hexlify(rorpath.get_resource_fork()) str_list.append(" ResourceFork %s\n" % (rf,)) # If there is Carbon data, save it. if rorpath.has_carbonfile(): if not rorpath.get_carbonfile(): cfile = "None" else: cfile = carbonfile2string(rorpath.get_carbonfile()) str_list.append(" CarbonFile %s\n" % (cfile,)) # If file is hardlinked, add that information if Globals.preserve_hardlinks: numlinks = rorpath.getnumlinks() if numlinks > 1: str_list.append(" NumHardLinks %s\n" % numlinks) str_list.append(" Inode %s\n" % rorpath.getinode()) str_list.append(" DeviceLoc %s\n" % rorpath.getdevloc()) elif type == "None": return "".join(str_list) elif type == "dir" or type == "sock" or type == "fifo": pass elif type == "sym": str_list.append(" SymData %s\n" % quote_path(rorpath.readlink())) elif type == "dev": major, minor = rorpath.getdevnums() if rorpath.isblkdev(): devchar = "b" else: assert rorpath.ischardev() devchar = "c" str_list.append(" DeviceNum %s %s %s\n" % (devchar, major, minor)) # Store time information if type != 'sym' and type != 'dev': str_list.append(" ModTime %s\n" % rorpath.getmtime()) # Add user, group, and permission information uid, gid = rorpath.getuidgid() str_list.append(" Uid %s\n" % uid) str_list.append(" Uname %s\n" % rorpath.getuname() or ":") str_list.append(" Gid %s\n" % gid) str_list.append(" Gname %s\n" % rorpath.getgname() or ":") str_list.append(" Permissions %s\n" % rorpath.getperms()) return "".join(str_list)
|
if data == ":": data_dict['uname'] = None
|
if data == ":" or data == 'None': data_dict['uname'] = None
|
def Record2RORP(record_string): """Given record_string, return RORPath For speed reasons, write the RORPath data dictionary directly instead of calling rorpath functions. Profiling has shown this to be a time critical function. """ data_dict = {} for field, data in line_parsing_regexp.findall(record_string): if field == "File": index = quoted_filename_to_index(data) elif field == "Type": if data == "None": data_dict['type'] = None else: data_dict['type'] = data elif field == "Size": data_dict['size'] = long(data) elif field == "ResourceFork": if data == "None": data_dict['resourcefork'] = "" else: data_dict['resourcefork'] = binascii.unhexlify(data) elif field == "CarbonFile": if data == "None": data_dict['carbonfile'] = None else: data_dict['carbonfile'] = string2carbonfile(data) elif field == "NumHardLinks": data_dict['nlink'] = int(data) elif field == "Inode": data_dict['inode'] = long(data) elif field == "DeviceLoc": data_dict['devloc'] = long(data) elif field == "SymData": data_dict['linkname'] = unquote_path(data) elif field == "DeviceNum": devchar, major_str, minor_str = data.split(" ") data_dict['devnums'] = (devchar, int(major_str), int(minor_str)) elif field == "ModTime": data_dict['mtime'] = long(data) elif field == "Uid": data_dict['uid'] = int(data) elif field == "Gid": data_dict['gid'] = int(data) elif field == "Uname": if data == ":": data_dict['uname'] = None else: data_dict['uname'] = data elif field == "Gname": if data == ':': data_dict['gname'] = None else: data_dict['gname'] = data elif field == "Permissions": data_dict['perms'] = int(data) else: raise ParsingError("Unknown field in line '%s %s'" % (field, data)) return rpath.RORPath(index, data_dict)
|
if data == ':': data_dict['gname'] = None
|
if data == ':' or data == 'None': data_dict['gname'] = None
|
def Record2RORP(record_string): """Given record_string, return RORPath For speed reasons, write the RORPath data dictionary directly instead of calling rorpath functions. Profiling has shown this to be a time critical function. """ data_dict = {} for field, data in line_parsing_regexp.findall(record_string): if field == "File": index = quoted_filename_to_index(data) elif field == "Type": if data == "None": data_dict['type'] = None else: data_dict['type'] = data elif field == "Size": data_dict['size'] = long(data) elif field == "ResourceFork": if data == "None": data_dict['resourcefork'] = "" else: data_dict['resourcefork'] = binascii.unhexlify(data) elif field == "CarbonFile": if data == "None": data_dict['carbonfile'] = None else: data_dict['carbonfile'] = string2carbonfile(data) elif field == "NumHardLinks": data_dict['nlink'] = int(data) elif field == "Inode": data_dict['inode'] = long(data) elif field == "DeviceLoc": data_dict['devloc'] = long(data) elif field == "SymData": data_dict['linkname'] = unquote_path(data) elif field == "DeviceNum": devchar, major_str, minor_str = data.split(" ") data_dict['devnums'] = (devchar, int(major_str), int(minor_str)) elif field == "ModTime": data_dict['mtime'] = long(data) elif field == "Uid": data_dict['uid'] = int(data) elif field == "Gid": data_dict['gid'] = int(data) elif field == "Uname": if data == ":": data_dict['uname'] = None else: data_dict['uname'] = data elif field == "Gname": if data == ':': data_dict['gname'] = None else: data_dict['gname'] = data elif field == "Permissions": data_dict['perms'] = int(data) else: raise ParsingError("Unknown field in line '%s %s'" % (field, data)) return rpath.RORPath(index, data_dict)
|
self.conn.os.utime(self.path, (accesstime, modtime)) self.data['atime'] = accesstime self.data['mtime'] = modtime
|
try: self.conn.os.utime(self.path, (accesstime, modtime)) except OverflowError: log.Log("Cannot change times of %s to %s - problem is probably" "64->32bit conversion" % (self.path, (accesstime, modtime)), 2) else: self.data['atime'] = accesstime self.data['mtime'] = modtime
|
def settime(self, accesstime, modtime): """Change file modification times""" log.Log("Setting time of %s to %d" % (self.path, modtime), 7) self.conn.os.utime(self.path, (accesstime, modtime)) self.data['atime'] = accesstime self.data['mtime'] = modtime
|
self.conn.os.utime(self.path, (long(time.time()), modtime)) self.data['mtime'] = modtime
|
try: self.conn.os.utime(self.path, (long(time.time()), modtime)) except OverflowError: log.Log("Cannot change mtime of %s to %s - problem is probably" "64->32bit conversion" % (self.path, modtime), 2) else: self.data['mtime'] = modtime
|
def setmtime(self, modtime): """Set only modtime (access time to present)""" log.Log(lambda: "Setting time of %s to %d" % (self.path, modtime), 7) self.conn.os.utime(self.path, (long(time.time()), modtime)) self.data['mtime'] = modtime
|
retvalparts.append('createDate:%d' % cfile['createDate'])
|
try: retvalparts.append('createDate:%d' % cfile['createDate']) except KeyError: log.Log("Writing pre-1.1.6 style metadata, without creation date", 9)
|
def carbonfile2string(cfile): """Convert CarbonFile data to a string suitable for storing.""" if not cfile: return "None" retvalparts = [] retvalparts.append('creator:%s' % binascii.hexlify(cfile['creator'])) retvalparts.append('type:%s' % binascii.hexlify(cfile['type'])) retvalparts.append('location:%d,%d' % cfile['location']) retvalparts.append('flags:%d' % cfile['flags']) retvalparts.append('createDate:%d' % cfile['createDate']) return '|'.join(retvalparts)
|
if raw_rf: raw_rf.set_metadata_rorp(metadata_rorp) yield raw_rf else:
|
if not raw_rf:
|
def iterate_meta_rfs(mirror_rp, inc_rp): """Yield RegressFile objects with extra metadata information added Each RegressFile will have an extra object variable .metadata_rorp which will contain the metadata attributes of the mirror file at regress_time. """ raw_rfs = iterate_raw_rfs(mirror_rp, inc_rp) collated = rorpiter.Collate2Iters(raw_rfs, yield_metadata()) for raw_rf, metadata_rorp in collated: if raw_rf: raw_rf.set_metadata_rorp(metadata_rorp) yield raw_rf else: log.Log("Warning, metadata file has entry for %s,\n" "but there are no associated files." % (metadata_rorp.get_indexpath(),), 2) yield RegressFile(mirror_rp.new_index(metadata_rorp.index), inc_rp.new_index(metadata_rorp.index), ())
|
yield RegressFile(mirror_rp.new_index(metadata_rorp.index), inc_rp.new_index(metadata_rorp.index), ())
|
continue raw_rf.set_metadata_rorp(metadata_rorp) yield raw_rf
|
def iterate_meta_rfs(mirror_rp, inc_rp): """Yield RegressFile objects with extra metadata information added Each RegressFile will have an extra object variable .metadata_rorp which will contain the metadata attributes of the mirror file at regress_time. """ raw_rfs = iterate_raw_rfs(mirror_rp, inc_rp) collated = rorpiter.Collate2Iters(raw_rfs, yield_metadata()) for raw_rf, metadata_rorp in collated: if raw_rf: raw_rf.set_metadata_rorp(metadata_rorp) yield raw_rf else: log.Log("Warning, metadata file has entry for %s,\n" "but there are no associated files." % (metadata_rorp.get_indexpath(),), 2) yield RegressFile(mirror_rp.new_index(metadata_rorp.index), inc_rp.new_index(metadata_rorp.index), ())
|
if rid.inc_list: print "/".join(rid.index)
|
if rid.inc_list: if not rid.index: path = "." else: path = "/".join(rid.index) print "%-11s: %s" % (determineChangeType(rid.inc_list), path)
|
def get_rids_recursive(rid): """Yield all the rids under rid that have inc newer than rest_time""" yield rid for sub_rid in Restore.yield_rids(rid, rest_time, mirror_time): for sub_sub_rid in get_rids_recursive(sub_rid): yield sub_sub_rid
|
if Globals.get(attr) is not None:
|
if Globals.get(attr) is None:
|
def update_bool_global(attr, bool): """If bool is not None, update Globals.attr accordingly""" if Globals.get(attr) is not None: SetConnections.UpdateGlobal(attr, bool)
|
if Globals.get(attr) is not None: SetConnections.UpdateGlobal(attr, bool)
|
if Globals.get(attr) is None: SetConnections.UpdateGlobal(attr, bool)
|
def update_bool_global(attr, bool): """If bool is not None, update Globals.attr accordingly""" if Globals.get(attr) is not None: SetConnections.UpdateGlobal(attr, bool)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.