rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
icon = t.getProperty('icon_expr')
icon = t.getIconExprObject()
def addable_types(self, include=None): """Return menu item entries in a TAL-friendly form.
icon = t.getIconExprObject()
def addable_types(self, include=None): """Return menu item entries in a TAL-friendly form.
return context_state.folder()
if context_state.is_structural_folder(): return self.context else: return context_state.folder()
def add_context(self): context_state = getMultiAdapter((self.context, self.request), name='plone_context_state') return context_state.folder()
templateOutput = self.template().encode('utf-8')
templateOutput = self.template(self).encode('utf-8')
def __call__(self): response = self.request.response response.setHeader('Content-Type', 'application/xml+epub') response.setHeader('Content-Disposition', 'attachment; filename=%s.epub' % self.context.id)
zipFile.writestr('META-INF/container.xml', replace('META-INF/container', {}))
zipFile.writestr('META-INF/container.xml', replace('META-INF/container.xml', {}))
def __call__(self): response = self.request.response response.setHeader('Content-Type', 'application/xml+epub') response.setHeader('Content-Disposition', 'attachment; filename=%s.epub' % self.context.id)
folder = self.context[self.context.invokeFactory('Folder', id=epub.ploneID)]
id = epub.ploneID count = 0 while hasattr(self.context, id): count += 1 id = '%s-%i' % (epub.ploneID, count) folder = self.context[self.context.invokeFactory('Folder', id=id)]
def importFile(self, epubFile): zipFile = ZipFile(epubFile, 'r') epub = EpubFile(zipFile)
templateOutput = self.template(self).encode('utf-8')
templateOutput = self.template(self) templateOutput = templateOutput.decode('utf-8') templateOutput = templateOutput.encode('utf-8')
def __call__(self): response = self.request.response response.setHeader('Content-Type', 'application/xml+epub') response.setHeader('Content-Disposition', 'attachment; filename=%s.epub' % self.context.id)
description = description.text.strip()
def chapters(self): guide = self.rootFile.find('guide') if guide == None: return []
if( os.path.exists( self.ptMount + "/ttgo.bif" ) ):
fileTTGObif = self.ptMount + "/ttgo.bif" if( os.path.exists( fileTTGObif ) ):
def GetConfig( self ):
with open( self.ptMount + "/ttgo.bif", "rb" ) as ttgobif:
with open( fileTTGObif, "rb" ) as ttgobif:
def GetConfig( self ):
cmd = ("mkdir '" + self.dirPoi + filename + "'" )
cmd = ("mkdir -p '" + self.dirPoi + filename + "'" )
def addPoiToDatabase( self,entry ):
self.poiCombo.append_text( filename )
def addPoiToDatabase( self,entry ):
cmd += " 2> /dev/null | tail -n +2 | tr -s ' ' | cut -d ' ' -f 4,7 --output-delimiter=,"
cmd += " 2> /dev/null | tail -n +2 | tr -s ' ' | cut -d ' ' -f 4,7-"
def GetPtWithSize( self, type = None, ptMount = None ):
line = line.split( ',', 2 ) self.Debug( 5, "Command result: " + str( int( line[0 ] ) ) + " -> " + line[ 1 ] ) res.append( [ int( line[ 0 ] ), line[ 1 ] ] )
line = line.split( ' ', 1 ) self.Debug( 5, "Command result: " + str( int( line[ 0 ] ) ) + " -> " + line[ -1 ] ) res.append( [ int( line[ 0 ] ), line[ -1 ] ] )
def GetPtWithSize( self, type = None, ptMount = None ):
files = os.listdir( self.dirPoi ) files.sort() for file in files: self.poiCombo.append_text( file )
if( os.path.exists( self.dirPoi ) ): files = os.listdir( self.dirPoi ) files.sort() for file in files: self.poiCombo.append_text( file )
def FramePoi( self, notebook ):
cmd = ( "umount " + self.ptMount )
cmd = ( "umount '" + self.ptMount +"'" )
def UMount(self, mountPoint):
cmd = ("cp '" + self.dirPoi + selectedPoi + "/'* " + self.ptMount + "/" + self.CurrentMap )
cmd = ("cp '" + self.dirPoi + selectedPoi + "/'* '" + self.ptMount + "'/" + self.CurrentMap )
def addPoiToTomtom( self,entry ):
cmd = ("rm " + self.ptMount + "/" + self.CurrentMap + "/'" + file + "'")
cmd = ("rm -f '" + self.ptMount + "'/" + self.CurrentMap + "/'" + file + "'")
def delPoiOnTomtom( self,entry ):
cmd = "cd " + self.ptMount + "; tar -" + option + "f \"" + self.fileName + "\" ."
cmd = "cd '" + self.ptMount + "'; tar -" + option + "f \"" + self.fileName + "\" ."
def BackupRestoreGPS( self, widget, type ):
cmd = "cd " + self.ptMount + "; echo tar -" + option + "f \"" + self.fileName + "\" ."
cmd = "cd '" + self.ptMount + "'; echo tar -" + option + "f \"" + self.fileName + "\" ."
def BackupRestoreGPS( self, widget, type ):
self.popup = gtk.FileChooserDialog( _( "Open..." ), gtk.Window( gtk.WINDOW_TOPLEVEL ),
self.popup = gtk.FileChooserDialog( _( "Open folder..." ), gtk.Window( gtk.WINDOW_TOPLEVEL ),
def addPoiToDatabase( self,entry ):
ip_tags = TagContainer(ip) control_tags = TagContainer(control)
ip_tags = TagContainer() control_tags = TagContainer() ip_tags(ip) control_tags(control)
def main(): # option parser usage = 'peakzilla [OPTIONS] IP_ALIGNMENT CONTROL_ALIGNMENT > RESULTS' parser = OptionParser(usage=usage) parser.add_option("-s", "--fragment_size",\ type = "int", dest="fragment_size", default="200",\ help = "fragment size in bp used to define the upper limit of peak size for modeling: default=200") parser.add_option("-m", "--model_threshold",\ type = "float", dest="model_threshold", default="120",\ help = "fold enrichment threshold over average read density for building the peak model: default=120") parser.add_option("-t", "--peak_threshold",\ type = "float", dest="peak_threshold", default="40",\ help = "fold enrichment threshold over average read density for finding candidate peaks: default=40") parser.add_option("-f", "--fdr",\ type = "float", dest="fdr", default='1',\ help = "cutoff for the estimated FDR value: default = 1") parser.add_option("-q", "--quiet",\ action = "store_false", dest="verbose", default=True,\ help = "don't print status messages") # read arguments and options (options, args) = parser.parse_args() if len(args) != 2: # if incorrect number of arguments are provided return help message parser.print_help() sys.exit(0) ip = args[0] control = args[1] # load tags print_status('Loading tags ...', options.verbose) ip_tags = TagContainer(ip) control_tags = TagContainer(control) # first attempt of modeling peak size print_status('Modeling peak size and shift ...', options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) # change model threshold until it yields a reasonable number of peaks while peak_model.peaks_incorporated < 800 or peak_model.peaks_incorporated > 1200: if peak_model.peaks_incorporated < 800: options.model_threshold = options.model_threshold / 2 print_status('Model threshold was set too high, trying: %.1f' % options.model_threshold, options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) else: options.model_threshold = 
options.model_threshold * 1.5 print_status('Model threshold was set too low, trying: %.1f' % options.model_threshold, options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) print_status('Used best %d peaks for modeling ...' % peak_model.peaks_incorporated, options.verbose) print_status('Peak size is %d bp' % peak_model.peak_size, options.verbose) # first attempt to find candidate peaks in control sample print_status('Finding potential false positives ...', options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) # change peak threshold until it yields a reasonable number of peaks while control_peaks.peak_count < 1000 or control_peaks.peak_count > 10000: if control_peaks.peak_count < 1000: options.peak_threshold = options.peak_threshold / 2 print_status('Peak threshold was set too high, trying: %.2f' % options.peak_threshold, options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) else: options.peak_threshold = options.peak_threshold * 1.5 print_status('Peak threshold was set too low, trying: %.2f' % options.peak_threshold, options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) print_status('%d potential false positives found' % control_peaks.peak_count, options.verbose) # find candidate peaks in IP sample print_status('Finding peak candidates ...', options.verbose) ip_peaks = PeakContainer(ip_tags, control_tags, peak_model.peak_size, options.peak_threshold) print_status('%d candidate peaks found' % ip_peaks.peak_count, options.verbose) # build distribution model print_status('Modeling tag distribution ...', options.verbose) distribution_model = ip_peaks.model_tag_distribution() # calculate tag distribution scores print_status('Calculating tag distribution scores ...', options.verbose) ip_peaks.determine_distribution_scores(distribution_model) 
control_peaks.determine_distribution_scores(distribution_model) # calculate FDR print_status('Calculating FDR ...', options.verbose) ip_peaks.calculate_fdr(control_peaks.peaks) # write output as bed files print_status('Writing results to file ...', options.verbose) ip_peaks.write_to_stdout(options.fdr) print_status('Done!', options.verbose)
usage = 'peakzilla [OPTIONS] IP_ALIGNMENT CONTROL_ALIGNMENT > RESULTS'
usage = 'peakzilla [OPTIONS] chip.bed control.bed > results.tsv'
def main(): # option parser usage = 'peakzilla [OPTIONS] IP_ALIGNMENT CONTROL_ALIGNMENT > RESULTS' parser = OptionParser(usage=usage) parser.add_option("-s", "--fragment_size",\ type = "int", dest="fragment_size", default="200",\ help = "fragment size in bp used to define the upper limit of peak size for modeling: default=200") parser.add_option("-m", "--model_threshold",\ type = "float", dest="model_threshold", default="120",\ help = "fold enrichment threshold over average read density for building the peak model: default=120") parser.add_option("-t", "--peak_threshold",\ type = "float", dest="peak_threshold", default="40",\ help = "fold enrichment threshold over average read density for finding candidate peaks: default=40") parser.add_option("-f", "--fdr",\ type = "float", dest="fdr", default='1',\ help = "cutoff for the estimated FDR value: default = 1") parser.add_option("-q", "--quiet",\ action = "store_false", dest="verbose", default=True,\ help = "don't print status messages") # read arguments and options (options, args) = parser.parse_args() if len(args) != 2: # if incorrect number of arguments are provided return help message parser.print_help() sys.exit(0) ip = args[0] control = args[1] # load tags print_status('Loading tags ...', options.verbose) ip_tags = TagContainer() control_tags = TagContainer() ip_tags(ip) control_tags(control) # first attempt of modeling peak size print_status('Modeling peak size and shift ...', options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) # change model threshold until it yields a reasonable number of peaks while peak_model.peaks_incorporated < 800 or peak_model.peaks_incorporated > 1200: if peak_model.peaks_incorporated < 800: options.model_threshold = options.model_threshold / 2 print_status('Model threshold was set too high, trying: %.1f' % options.model_threshold, options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) else: 
options.model_threshold = options.model_threshold * 1.5 print_status('Model threshold was set too low, trying: %.1f' % options.model_threshold, options.verbose) peak_model = PeakModel(ip_tags, options.fragment_size, options.model_threshold) print_status('Used best %d peaks for modeling ...' % peak_model.peaks_incorporated, options.verbose) print_status('Peak size is %d bp' % peak_model.peak_size, options.verbose) # first attempt to find candidate peaks in control sample print_status('Finding potential false positives ...', options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) # change peak threshold until it yields a reasonable number of peaks while control_peaks.peak_count < 2000 or control_peaks.peak_count > 10000: if control_peaks.peak_count < 2000: options.peak_threshold = options.peak_threshold / 2 print_status('Peak threshold was set too high, trying: %.2f' % options.peak_threshold, options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) else: options.peak_threshold = options.peak_threshold * 1.5 print_status('Peak threshold was set too low, trying: %.2f' % options.peak_threshold, options.verbose) control_peaks = PeakContainer(control_tags, ip_tags, peak_model.peak_size, options.peak_threshold) print_status('%d potential false positives found' % control_peaks.peak_count, options.verbose) # find candidate peaks in IP sample print_status('Finding peak candidates ...', options.verbose) ip_peaks = PeakContainer(ip_tags, control_tags, peak_model.peak_size, options.peak_threshold) print_status('%d candidate peaks found' % ip_peaks.peak_count, options.verbose) # build distribution model print_status('Modeling tag distribution ...', options.verbose) distribution_model = ip_peaks.model_tag_distribution() # calculate tag distribution scores print_status('Calculating tag distribution scores ...', options.verbose) 
ip_peaks.determine_distribution_scores(distribution_model) control_peaks.determine_distribution_scores(distribution_model) # calculate FDR print_status('Calculating FDR ...', options.verbose) ip_peaks.calculate_fdr(control_peaks.peaks) # write output as bed files print_status('Writing results to file ...', options.verbose) ip_peaks.write_to_stdout(options.fdr) print_status('Done!', options.verbose)
self.peak_shift = int(median(self.peak_shifts)) self.peak_size = self.peak_shift * 2
if self.peak_shifts: self.peak_shift = int(median(self.peak_shifts)) self.peak_size = self.peak_shift * 2
def build(self): # for all chromosomes look for shifted peaks for chrom in self.tags.get_chrom_names(): plus_peaks = self.find_simple_peaks(chrom, '+') minus_peaks = self.find_simple_peaks(chrom, '-') self.determine_shifts(plus_peaks, minus_peaks) # calculate the meidan peak_shift self.peak_shift = int(median(self.peak_shifts)) # peak size is 2 * shift size self.peak_size = self.peak_shift * 2
candidate_survivals = 0
peak_candidate.survivals = 0
def find_peaks(self, chrom): # identify peak candidates on chromosome self.peaks[chrom] = [] # convert tag arrays to deque for fast appending and popping plus_tags = deque(self.ip_tags.get_tags(chrom, '+')) minus_tags = deque(self.ip_tags.get_tags(chrom, '-')) # initalize windows and stuff plus_window = deque([]) minus_window = deque([]) score_buffer = deque([]) peak_candidate = Peak() position = 0 while plus_tags and minus_tags: # fill windows while plus_tags and plus_tags[0] < position: plus_window.append(plus_tags.popleft()) while minus_tags and minus_tags[0] < (position + self.peak_shift): minus_window.append(minus_tags.popleft()) # get rid of old tags not fitting in the window any more while plus_window and plus_window[0] < (position - self.peak_shift): plus_window.popleft() while minus_window and minus_window[0] < position: minus_window.popleft() # add position to region if over threshold score = len(plus_window) + len(minus_window) if score > self.tag_threshold: # save all scores in buffer score_buffer.append(score) # get rid of old scores that are outside of the filter if len(score_buffer) > self.peak_size: score_buffer.popleft() # if current score is as big or bigger, consider it instead if score >= peak_candidate.tag_count: peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # candidate survives if current score is smaller else: candidate_survivals += 1 # if candidate survives long enough do the expensive lookup if candidate_survivals == self.peak_shift: # check score buffer to see whether candidate is a maximum # candidate is in the middle of the buffer now if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) # consider current score next, reset survivals peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # while in 
enriched region move windows in 1 bp steps position += 1 else: # if we still have a candidate check whether its a max and add if peak_candidate: if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) peak_candidate = Peak() candidate_survivals = 0 score_buffer = deque([]) # determine the next informative position in the genome and move there if plus_tags and minus_tags: distance_to_next = plus_tags[0] - position + 1 position += distance_to_next
candidate_survivals += 1
peak_candidate.survivals += 1
def find_peaks(self, chrom): # identify peak candidates on chromosome self.peaks[chrom] = [] # convert tag arrays to deque for fast appending and popping plus_tags = deque(self.ip_tags.get_tags(chrom, '+')) minus_tags = deque(self.ip_tags.get_tags(chrom, '-')) # initalize windows and stuff plus_window = deque([]) minus_window = deque([]) score_buffer = deque([]) peak_candidate = Peak() position = 0 while plus_tags and minus_tags: # fill windows while plus_tags and plus_tags[0] < position: plus_window.append(plus_tags.popleft()) while minus_tags and minus_tags[0] < (position + self.peak_shift): minus_window.append(minus_tags.popleft()) # get rid of old tags not fitting in the window any more while plus_window and plus_window[0] < (position - self.peak_shift): plus_window.popleft() while minus_window and minus_window[0] < position: minus_window.popleft() # add position to region if over threshold score = len(plus_window) + len(minus_window) if score > self.tag_threshold: # save all scores in buffer score_buffer.append(score) # get rid of old scores that are outside of the filter if len(score_buffer) > self.peak_size: score_buffer.popleft() # if current score is as big or bigger, consider it instead if score >= peak_candidate.tag_count: peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # candidate survives if current score is smaller else: candidate_survivals += 1 # if candidate survives long enough do the expensive lookup if candidate_survivals == self.peak_shift: # check score buffer to see whether candidate is a maximum # candidate is in the middle of the buffer now if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) # consider current score next, reset survivals peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # while in 
enriched region move windows in 1 bp steps position += 1 else: # if we still have a candidate check whether its a max and add if peak_candidate: if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) peak_candidate = Peak() candidate_survivals = 0 score_buffer = deque([]) # determine the next informative position in the genome and move there if plus_tags and minus_tags: distance_to_next = plus_tags[0] - position + 1 position += distance_to_next
if candidate_survivals == self.peak_shift:
if peak_candidate.survivals == self.peak_shift:
def find_peaks(self, chrom): # identify peak candidates on chromosome self.peaks[chrom] = [] # convert tag arrays to deque for fast appending and popping plus_tags = deque(self.ip_tags.get_tags(chrom, '+')) minus_tags = deque(self.ip_tags.get_tags(chrom, '-')) # initalize windows and stuff plus_window = deque([]) minus_window = deque([]) score_buffer = deque([]) peak_candidate = Peak() position = 0 while plus_tags and minus_tags: # fill windows while plus_tags and plus_tags[0] < position: plus_window.append(plus_tags.popleft()) while minus_tags and minus_tags[0] < (position + self.peak_shift): minus_window.append(minus_tags.popleft()) # get rid of old tags not fitting in the window any more while plus_window and plus_window[0] < (position - self.peak_shift): plus_window.popleft() while minus_window and minus_window[0] < position: minus_window.popleft() # add position to region if over threshold score = len(plus_window) + len(minus_window) if score > self.tag_threshold: # save all scores in buffer score_buffer.append(score) # get rid of old scores that are outside of the filter if len(score_buffer) > self.peak_size: score_buffer.popleft() # if current score is as big or bigger, consider it instead if score >= peak_candidate.tag_count: peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # candidate survives if current score is smaller else: candidate_survivals += 1 # if candidate survives long enough do the expensive lookup if candidate_survivals == self.peak_shift: # check score buffer to see whether candidate is a maximum # candidate is in the middle of the buffer now if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) # consider current score next, reset survivals peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # while in 
enriched region move windows in 1 bp steps position += 1 else: # if we still have a candidate check whether its a max and add if peak_candidate: if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) peak_candidate = Peak() candidate_survivals = 0 score_buffer = deque([]) # determine the next informative position in the genome and move there if plus_tags and minus_tags: distance_to_next = plus_tags[0] - position + 1 position += distance_to_next
peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0
peak_candidate = Peak()
def find_peaks(self, chrom): # identify peak candidates on chromosome self.peaks[chrom] = [] # convert tag arrays to deque for fast appending and popping plus_tags = deque(self.ip_tags.get_tags(chrom, '+')) minus_tags = deque(self.ip_tags.get_tags(chrom, '-')) # initalize windows and stuff plus_window = deque([]) minus_window = deque([]) score_buffer = deque([]) peak_candidate = Peak() position = 0 while plus_tags and minus_tags: # fill windows while plus_tags and plus_tags[0] < position: plus_window.append(plus_tags.popleft()) while minus_tags and minus_tags[0] < (position + self.peak_shift): minus_window.append(minus_tags.popleft()) # get rid of old tags not fitting in the window any more while plus_window and plus_window[0] < (position - self.peak_shift): plus_window.popleft() while minus_window and minus_window[0] < position: minus_window.popleft() # add position to region if over threshold score = len(plus_window) + len(minus_window) if score > self.tag_threshold: # save all scores in buffer score_buffer.append(score) # get rid of old scores that are outside of the filter if len(score_buffer) > self.peak_size: score_buffer.popleft() # if current score is as big or bigger, consider it instead if score >= peak_candidate.tag_count: peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # candidate survives if current score is smaller else: candidate_survivals += 1 # if candidate survives long enough do the expensive lookup if candidate_survivals == self.peak_shift: # check score buffer to see whether candidate is a maximum # candidate is in the middle of the buffer now if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) # consider current score next, reset survivals peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # while in 
enriched region move windows in 1 bp steps position += 1 else: # if we still have a candidate check whether its a max and add if peak_candidate: if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) peak_candidate = Peak() candidate_survivals = 0 score_buffer = deque([]) # determine the next informative position in the genome and move there if plus_tags and minus_tags: distance_to_next = plus_tags[0] - position + 1 position += distance_to_next
candidate_survivals = 0
def find_peaks(self, chrom): # identify peak candidates on chromosome self.peaks[chrom] = [] # convert tag arrays to deque for fast appending and popping plus_tags = deque(self.ip_tags.get_tags(chrom, '+')) minus_tags = deque(self.ip_tags.get_tags(chrom, '-')) # initalize windows and stuff plus_window = deque([]) minus_window = deque([]) score_buffer = deque([]) peak_candidate = Peak() position = 0 while plus_tags and minus_tags: # fill windows while plus_tags and plus_tags[0] < position: plus_window.append(plus_tags.popleft()) while minus_tags and minus_tags[0] < (position + self.peak_shift): minus_window.append(minus_tags.popleft()) # get rid of old tags not fitting in the window any more while plus_window and plus_window[0] < (position - self.peak_shift): plus_window.popleft() while minus_window and minus_window[0] < position: minus_window.popleft() # add position to region if over threshold score = len(plus_window) + len(minus_window) if score > self.tag_threshold: # save all scores in buffer score_buffer.append(score) # get rid of old scores that are outside of the filter if len(score_buffer) > self.peak_size: score_buffer.popleft() # if current score is as big or bigger, consider it instead if score >= peak_candidate.tag_count: peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # candidate survives if current score is smaller else: candidate_survivals += 1 # if candidate survives long enough do the expensive lookup if candidate_survivals == self.peak_shift: # check score buffer to see whether candidate is a maximum # candidate is in the middle of the buffer now if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) # consider current score next, reset survivals peak_candidate.tag_count = score peak_candidate.position = position peak_candidate.tags = list(plus_window) + list(minus_window) candidate_survivals = 0 # while in 
enriched region move windows in 1 bp steps position += 1 else: # if we still have a candidate check whether its a max and add if peak_candidate: if peak_candidate.tag_count == max(score_buffer): self.add_peak(peak_candidate, chrom) peak_candidate = Peak() candidate_survivals = 0 score_buffer = deque([]) # determine the next informative position in the genome and move there if plus_tags and minus_tags: distance_to_next = plus_tags[0] - position + 1 position += distance_to_next
global IGNORE_LIST
def load_lists(force=False): """Load ignore and filtered word lists""" debug_print('Loading ignore list') if not IGNORE_LIST or force is True: global IGNORE_LIST IGNORE_LIST = [ line.lower().strip() for line in open(settings.ignore_list) ] debug_print('Loading filtered word list') if not FILTER_WORDS or force is True: global FILTER_WORDS FILTER_WORDS = [ line.lower().strip() for line in open(settings.filtered_word_list) ]
global FILTER_WORDS
def load_lists(force=False): """Load ignore and filtered word lists""" debug_print('Loading ignore list') if not IGNORE_LIST or force is True: global IGNORE_LIST IGNORE_LIST = [ line.lower().strip() for line in open(settings.ignore_list) ] debug_print('Loading filtered word list') if not FILTER_WORDS or force is True: global FILTER_WORDS FILTER_WORDS = [ line.lower().strip() for line in open(settings.filtered_word_list) ]
else: save_id(settings.lastid,reply.id)
save_id(settings.lastid,reply.id)
def main(): auth = tweepy.BasicAuthHandler(username=settings.username, password=settings.password) api = tweepy.API(auth_handler=auth, secure=True, retry_count=3) last_id = get_last_id(settings.lastid) debug_print('Loading friends list') friends = api.friends_ids() debug_print('Friend list loaded, size: %d' % len(friends)) try: debug_print('Retrieving mentions') replies = api.mentions() except Exception, e: # quit on error here print e exit(1) # want these in ascending order, api orders them descending replies.reverse() for reply in replies: # ignore tweet if it's id is lower than our last tweeted id if reply.id > last_id and reply.user.id in friends: try: careful_retweet(api,reply) except HTTPError, e: print e.code() print e.read() except Exception, e: print 'e: %s' % e print repr(e) else: save_id(settings.lastid,reply.id) debug_print('Exiting cleanly')
else: debug_print('Received smaller ID, not saving. Old: %d, New: %s' % ( last_id, id))
def save_id(statefile,id): """Save last status ID to a file""" last_id = get_last_id(statefile) if last_id < id: f = open(statefile,'w') f.write(str(id)) # no trailing newline f.close() # Don't need to do anything if for some reason the ID got smaller
pass
for fd, obj in asyncore.socket_map.items(): obj.close()
def main_func(): options = _parse_options() if options.message_filter: from utils import MessageMap MessageMap.set_filter(options.message_filter) os.chdir(options.root) try: run_proxy(options) except KeyboardInterrupt: # todo: shut down the open connections cleanly pass """ import cProfile, sys p=open("profile", "w") sys.stdout = p cProfile.run("run_proxy(count = 5000, context = options)") p.close() """
msg = '["%s",%s,%s,%s,%s]' % (
message = '["%s",%s,%s,%s,%s]' % (
def handle_scope_message(self, msg): msg = '["%s",%s,%s,%s,%s]' % ( msg[1], # service msg[2], # command msg[4], # status msg[5], # tag msg[8], # payload ) if self.debug: pretty_print("send to client:", msg, self.debug_format, self.debug_format_payload) self.send_message(msg)
self.send_message(msg)
self.send_message(message)
def handle_scope_message(self, msg): msg = '["%s",%s,%s,%s,%s]' % ( msg[1], # service msg[2], # command msg[4], # status msg[5], # tag msg[8], # payload ) if self.debug: pretty_print("send to client:", msg, self.debug_format, self.debug_format_payload) self.send_message(msg)
return ''.join([chr((n >> 8 * (3 - i)) & 0xff) for i in range(4)])
return pack("!I", n)
def _get_number(self, in_str): n = int(''.join([i for i in in_str if i.isdigit()])) / in_str.count(' ') return ''.join([chr((n >> 8 * (3 - i)) & 0xff) for i in range(4)]) # throws DeprecationWarning: struct integer overflow masking is deprecated # for e.g. 2621808669 with Python 2.6.5 # return pack("!i", n & 0xffffffff)
print "%s%s: %s" % ( indent * INDENT, name, value)
try: print "%s%s: %s" % ( indent * INDENT, name, value) except: print "%s%s: %s%s" % ( indent * INDENT, name, value[0:100], '...')
def pretty_print_payload_item(indent, name, definition, item): if item and "message" in definition: print "%s%s:" % (indent * INDENT, name) pretty_print_payload(item, definition["message"], indent=indent+1) else: value = item if "enum" in definition: value = "%s (%s)" % (definition['enum']['numbers'][item], item) elif item == None: value = "null" elif isinstance(item, str): value = "\"%s\"" % item print "%s%s: %s" % ( indent * INDENT, name, value)
self.PATH_INFO = self.REQUEST_URI[pos:]
path_info = self.REQUEST_URI[pos:] if "?" in path_info: path_info = path_info[0:path_info.find("?")] self.PATH_INFO = path_info
def check_is_cgi(self, system_path, handler=".cgi"): # system path of the cgi script self.cgi_script = "" self.SCRIPT_NAME = "" self.PATH_INFO = "" if handler in system_path: script_path = system_path[0:system_path.find(handler) + len(handler)] if isfile(script_path): self.cgi_script = script_path pos = self.REQUEST_URI.find(handler) + len(handler) self.SCRIPT_NAME = self.REQUEST_URI[0:pos] self.PATH_INFO = self.REQUEST_URI[pos:] return bool(self.cgi_script)
headers_raw, first_line, self.headers, self.in_buffer = raw_parsed_headers
(headers_raw, first_line, self.headers, self.in_buffer) = raw_parsed_headers
def read_headers(self): raw_parsed_headers = parse_headers(self.in_buffer) if raw_parsed_headers: # to dispatch any hanging timeout response self.flush() headers_raw, first_line, self.headers, self.in_buffer = raw_parsed_headers method, path, protocol = first_line.split(BLANK, 2) self.REQUEST_URI = path path = path.lstrip("/") if "?" in path: path, self.query = path.split('?', 1) arguments = path.split("/") command = arguments and arguments.pop(0) or "" command = command.replace('-', '_').replace('.', '_') system_path = URI_to_system_path(path.rstrip("/")) or "." self.method = method self.path = path self.command = command self.arguments = arguments self.system_path = system_path self.timeout = time() + TIMEOUT if self.cgi_enabled: self.check_is_cgi(system_path) # POST if method == "POST": if "Content-Length" in self.headers: self.content_length = int(self.headers["Content-Length"]) self.check_input = self.read_content self.check_input() # GET elif method == "GET": if hasattr(self, command) and hasattr(getattr(self, command), '__call__'): getattr(self, command)() else: if self.cgi_script: self.handle_cgi() elif os.path.exists(system_path) or not path: self.serve(path, system_path) elif path == "favicon.ico": self.serve(path, path_join(SOURCE_ROOT, "favicon.ico")) else: content = "The server cannot handle: %s" % path self.out_buffer += NOT_FOUND % ( get_timestamp(), len(content), content) self.timeout = 0 if self.in_buffer: self.check_input() # Not implemented method else: content = "The server cannot handle: %s" % method self.out_buffer += NOT_FOUND % ( get_timestamp(), len(content), content) self.timeout = 0
if hasattr(self, command) and hasattr(getattr(self, command), '__call__'):
if hasattr(self, command) and \ hasattr(getattr(self, command), '__call__'):
def read_headers(self): raw_parsed_headers = parse_headers(self.in_buffer) if raw_parsed_headers: # to dispatch any hanging timeout response self.flush() headers_raw, first_line, self.headers, self.in_buffer = raw_parsed_headers method, path, protocol = first_line.split(BLANK, 2) self.REQUEST_URI = path path = path.lstrip("/") if "?" in path: path, self.query = path.split('?', 1) arguments = path.split("/") command = arguments and arguments.pop(0) or "" command = command.replace('-', '_').replace('.', '_') system_path = URI_to_system_path(path.rstrip("/")) or "." self.method = method self.path = path self.command = command self.arguments = arguments self.system_path = system_path self.timeout = time() + TIMEOUT if self.cgi_enabled: self.check_is_cgi(system_path) # POST if method == "POST": if "Content-Length" in self.headers: self.content_length = int(self.headers["Content-Length"]) self.check_input = self.read_content self.check_input() # GET elif method == "GET": if hasattr(self, command) and hasattr(getattr(self, command), '__call__'): getattr(self, command)() else: if self.cgi_script: self.handle_cgi() elif os.path.exists(system_path) or not path: self.serve(path, system_path) elif path == "favicon.ico": self.serve(path, path_join(SOURCE_ROOT, "favicon.ico")) else: content = "The server cannot handle: %s" % path self.out_buffer += NOT_FOUND % ( get_timestamp(), len(content), content) self.timeout = 0 if self.in_buffer: self.check_input() # Not implemented method else: content = "The server cannot handle: %s" % method self.out_buffer += NOT_FOUND % ( get_timestamp(), len(content), content) self.timeout = 0
environ["PATH_TRANSLATED"] = cwd + self.PATH_INFO.replace("/", os.path.sep)
environ["PATH_TRANSLATED"] = \ cwd + self.PATH_INFO.replace("/", os.path.sep)
def handle_cgi(self): import subprocess is_failed = False remote_addr, remote_port = self.socket.getpeername() cwd = os.getcwd() environ = { # os "COMSPEC": os.environ["COMSPEC"], "PATH": os.environ["PATH"], "PATHEXT": os.environ["PATHEXT"], "SYSTEMROOT": os.environ["SYSTEMROOT"], "WINDIR": os.environ["WINDIR"], # server "DOCUMENT_ROOT": os.getcwd().replace(os.path.sep, "/"), "GATEWAY_INTERFACE": "CGI/1.1", "QUERY_STRING": self.query, "REMOTE_ADDR": remote_addr, "REMOTE_PORT": str(remote_port), "REQUEST_METHOD": self.method, "REQUEST_URI": self.REQUEST_URI, "SCRIPT_FILENAME": cwd.replace(os.path.sep, "/") + self.SCRIPT_NAME, "SCRIPT_NAME": self.SCRIPT_NAME, "SERVER_ADDR": self.context.SERVER_ADDR, "SERVER_ADMIN": "", "SERVER_NAME": self.context.SERVER_NAME, "SERVER_PORT": str(self.context.SERVER_PORT), "SERVER_PROTOCOL": " HTTP/1.1", "SERVER_SIGNATURE": "", "SERVER_SOFTWARE": "dragonkeeper/%s" % VERSION, } if self.PATH_INFO: environ["PATH_INFO"] = self.PATH_INFO environ["PATH_TRANSLATED"] = cwd + self.PATH_INFO.replace("/", os.path.sep) for header in self.headers: key = "HTTP_%s" % header.upper().replace('-', '_') environ[key] = self.headers[header] script_abs_path = os.path.abspath(self.cgi_script) response_code = 200 response_token = 'OK' stdoutdata = "" stderrdata = "" headers = {} content = "" try: file = open(script_abs_path, 'rb') first_line = file.readline() file.close() except: is_failed = True if not is_failed: if first_line.startswith("#!"): first_line = first_line[2:].strip() else: is_failed = True if not is_failed: p = subprocess.Popen( [first_line, script_abs_path], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, env=environ, cwd=os.path.split(script_abs_path)[0] ) input = None if self.method == "POST": input = self.raw_post_data stdoutdata, stderrdata = p.communicate(input) if stderrdata: content = "\n". 
join([ "Error occured in the subprocess", "-------------------------------", "", stderrdata ]) headers['Content-Type'] = 'text/plain' elif stdoutdata: raw_parsed_headers = parse_headers(CRLF + stdoutdata) if raw_parsed_headers: headers_raw, first_line, headers, content = raw_parsed_headers if 'Status' in headers: response_code, response_token = headers.pop('Status').split(' ', 1) else: # assume its html content = stdoutdata headers['Content-Type'] = 'text/html' headers['Content-Length'] = len(content) self.out_buffer += RESPONSE_BASIC % ( response_code, response_token, get_timestamp(), "".join( ["%s: %s\r\n" % (key, headers[key]) for key in headers] + [CRLF, content] ) ) self.timeout = 0
headers_raw, first_line, headers, content = raw_parsed_headers
(headers_raw, first_line, headers, content) = raw_parsed_headers
def handle_cgi(self): import subprocess is_failed = False remote_addr, remote_port = self.socket.getpeername() cwd = os.getcwd() environ = { # os "COMSPEC": os.environ["COMSPEC"], "PATH": os.environ["PATH"], "PATHEXT": os.environ["PATHEXT"], "SYSTEMROOT": os.environ["SYSTEMROOT"], "WINDIR": os.environ["WINDIR"], # server "DOCUMENT_ROOT": os.getcwd().replace(os.path.sep, "/"), "GATEWAY_INTERFACE": "CGI/1.1", "QUERY_STRING": self.query, "REMOTE_ADDR": remote_addr, "REMOTE_PORT": str(remote_port), "REQUEST_METHOD": self.method, "REQUEST_URI": self.REQUEST_URI, "SCRIPT_FILENAME": cwd.replace(os.path.sep, "/") + self.SCRIPT_NAME, "SCRIPT_NAME": self.SCRIPT_NAME, "SERVER_ADDR": self.context.SERVER_ADDR, "SERVER_ADMIN": "", "SERVER_NAME": self.context.SERVER_NAME, "SERVER_PORT": str(self.context.SERVER_PORT), "SERVER_PROTOCOL": " HTTP/1.1", "SERVER_SIGNATURE": "", "SERVER_SOFTWARE": "dragonkeeper/%s" % VERSION, } if self.PATH_INFO: environ["PATH_INFO"] = self.PATH_INFO environ["PATH_TRANSLATED"] = cwd + self.PATH_INFO.replace("/", os.path.sep) for header in self.headers: key = "HTTP_%s" % header.upper().replace('-', '_') environ[key] = self.headers[header] script_abs_path = os.path.abspath(self.cgi_script) response_code = 200 response_token = 'OK' stdoutdata = "" stderrdata = "" headers = {} content = "" try: file = open(script_abs_path, 'rb') first_line = file.readline() file.close() except: is_failed = True if not is_failed: if first_line.startswith("#!"): first_line = first_line[2:].strip() else: is_failed = True if not is_failed: p = subprocess.Popen( [first_line, script_abs_path], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, env=environ, cwd=os.path.split(script_abs_path)[0] ) input = None if self.method == "POST": input = self.raw_post_data stdoutdata, stderrdata = p.communicate(input) if stderrdata: content = "\n". 
join([ "Error occured in the subprocess", "-------------------------------", "", stderrdata ]) headers['Content-Type'] = 'text/plain' elif stdoutdata: raw_parsed_headers = parse_headers(CRLF + stdoutdata) if raw_parsed_headers: headers_raw, first_line, headers, content = raw_parsed_headers if 'Status' in headers: response_code, response_token = headers.pop('Status').split(' ', 1) else: # assume its html content = stdoutdata headers['Content-Type'] = 'text/html' headers['Content-Length'] = len(content) self.out_buffer += RESPONSE_BASIC % ( response_code, response_token, get_timestamp(), "".join( ["%s: %s\r\n" % (key, headers[key]) for key in headers] + [CRLF, content] ) ) self.timeout = 0
response_code, response_token = headers.pop('Status').split(' ', 1)
response_code, response_token = \ headers.pop('Status').split(' ', 1)
def handle_cgi(self): import subprocess is_failed = False remote_addr, remote_port = self.socket.getpeername() cwd = os.getcwd() environ = { # os "COMSPEC": os.environ["COMSPEC"], "PATH": os.environ["PATH"], "PATHEXT": os.environ["PATHEXT"], "SYSTEMROOT": os.environ["SYSTEMROOT"], "WINDIR": os.environ["WINDIR"], # server "DOCUMENT_ROOT": os.getcwd().replace(os.path.sep, "/"), "GATEWAY_INTERFACE": "CGI/1.1", "QUERY_STRING": self.query, "REMOTE_ADDR": remote_addr, "REMOTE_PORT": str(remote_port), "REQUEST_METHOD": self.method, "REQUEST_URI": self.REQUEST_URI, "SCRIPT_FILENAME": cwd.replace(os.path.sep, "/") + self.SCRIPT_NAME, "SCRIPT_NAME": self.SCRIPT_NAME, "SERVER_ADDR": self.context.SERVER_ADDR, "SERVER_ADMIN": "", "SERVER_NAME": self.context.SERVER_NAME, "SERVER_PORT": str(self.context.SERVER_PORT), "SERVER_PROTOCOL": " HTTP/1.1", "SERVER_SIGNATURE": "", "SERVER_SOFTWARE": "dragonkeeper/%s" % VERSION, } if self.PATH_INFO: environ["PATH_INFO"] = self.PATH_INFO environ["PATH_TRANSLATED"] = cwd + self.PATH_INFO.replace("/", os.path.sep) for header in self.headers: key = "HTTP_%s" % header.upper().replace('-', '_') environ[key] = self.headers[header] script_abs_path = os.path.abspath(self.cgi_script) response_code = 200 response_token = 'OK' stdoutdata = "" stderrdata = "" headers = {} content = "" try: file = open(script_abs_path, 'rb') first_line = file.readline() file.close() except: is_failed = True if not is_failed: if first_line.startswith("#!"): first_line = first_line[2:].strip() else: is_failed = True if not is_failed: p = subprocess.Popen( [first_line, script_abs_path], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, env=environ, cwd=os.path.split(script_abs_path)[0] ) input = None if self.method == "POST": input = self.raw_post_data stdoutdata, stderrdata = p.communicate(input) if stderrdata: content = "\n". 
join([ "Error occured in the subprocess", "-------------------------------", "", stderrdata ]) headers['Content-Type'] = 'text/plain' elif stdoutdata: raw_parsed_headers = parse_headers(CRLF + stdoutdata) if raw_parsed_headers: headers_raw, first_line, headers, content = raw_parsed_headers if 'Status' in headers: response_code, response_token = headers.pop('Status').split(' ', 1) else: # assume its html content = stdoutdata headers['Content-Type'] = 'text/html' headers['Content-Length'] = len(content) self.out_buffer += RESPONSE_BASIC % ( response_code, response_token, get_timestamp(), "".join( ["%s: %s\r\n" % (key, headers[key]) for key in headers] + [CRLF, content] ) ) self.timeout = 0
if problem_data["BCs"] != "null":
if problem_data["BCs"]:
def calc_global(problem_data): """ Calculates global stiffness matrix, assembly of elemental systems are included here instead of defining an extra function for assembly """ print("Calculating global system...") global NEN, NEN_range, functions, a, V1, V2, c, f, shape_funcs #Defining global variables NEN = problem_data["NEN"] NEN_range = range(NEN) #Taking coefficient functions of DE out of problem data functions = problem_data["functions"] a = functions["a"] V1 = functions["V1"] V2 = functions["V2"] c = functions["c"] f = functions["f"] #Defining shape functions shape_funcs = problem_data["shapefunc"] print(" * Creating matrixes...") NN = problem_data["NN"] K = sparse.lil_matrix((NN, NN)) F = zeros((NN, 1)) print(" * Calculating K and F matrixes...") for e_nodes in problem_data["LtoG"]: Ke, Fe = calc_elem(problem_data, e_nodes) for i, node_i in enumerate(e_nodes): F[node_i] += Fe[i] for j, node_j in enumerate(e_nodes): K[node_i, node_j] += Ke[i][j] print(" * Freeing up memory (1/2)...") del problem_data["GQ"] del problem_data["UV"] del problem_data["functions"] if problem_data["BCs"] != "null": K, F = apply_bc(problem_data, K, F) print (" * Freeing up memory (2/2)...") del problem_data["LtoG"] del problem_data["BCs"] print(" * Converting LIL to CSR format...") K = K.tocsr() return K, F
get_supported_packages = urllib2.urlopen(url=packages_url)
try: get_supported_packages = urllib2.urlopen(url=packages_url) except urllib2.HTTPError, e: self._repository_status['support_status'] = "" self._repository_status['details'] = "" if e.code == 500: self._repository_status['support_status'] = \ ("Error connecting to repository (server side issues)") else: print str(e) self._push_button.setEnabled(False) self.update_push_information() return
def check_dependencies(self): """ determines if current VisTrail will be supported by the repository's VisTrail server """
self._unrunnable_wfs = {}
def check_dependencies(self): """ determines if current VisTrail will be supported by the repository's VisTrail server """
'branch_from': "" if not branching else repository_vt_id,
'branched_from': "" if not branching else repository_vt_id,
def push_vistrail_to_repository(self, branching=False): """ uploads current VisTrail to web repository """
except:
except Exception, e: log(str(e))
def check_url(url): try: p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() if h.getreply()[0] == 200: return True else: return False except: return False
elif check_url(path_to_vistrails):
elif (not build_always or (build_always and check_url(path_to_vistrails))):
def check_url(url): try: p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() if h.getreply()[0] == 200: return True else: return False except: return False
generate_latex_error("%s is not a valid url nor a valid path to vistrails.py" %\ (path_to_vistrails)))
generate_latex_error("It is possible that %s is not a valid \ url nor a valid path to vistrails.py or that you don't have an internet connection \ and some workflows have the buildalways option. If you already have cached files, \ try removing the buildalways option from vistrails latex command" %\ (path_to_vistrails)))
def check_url(url): try: p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() if h.getreply()[0] == 200: return True else: return False except: return False
r = pkg.report_missing_module(mname, mnamespace)
r = None if pkg.can_handle_missing_modules(): r = pkg.handle_missing_module(controller, module_id, pipeline) d = get_descriptor(mpkg, mname, mnamespace)
def attempt_automatic_upgrade(controller, pipeline, module_id): """attempt_automatic_upgrade(module_id, pipeline): [Action]
d = get_descriptor(mpkg, mname, mnamespace)
def attempt_automatic_upgrade(controller, pipeline, module_id): """attempt_automatic_upgrade(module_id, pipeline): [Action]
try: port_type = PortSpec.port_type_map.inverse[port.type] s = reg.get_port_spec_from_descriptor(d, port.name, port_type) except Exception, e: import traceback traceback.print_exc() msg = ("%s connection to port %s of module %s " "does not exist." % \ (PortSpec.port_type_map.inverse[port.type], port.name, invalid_module.name)) raise UpgradeWorkflowError(msg)
port_type = PortSpec.port_type_map.inverse[port.type] UpgradeWorkflowHandler.check_port_spec(invalid_module, port.name, port_type, d, port.sigstring)
def check_connection_port(port): try: port_type = PortSpec.port_type_map.inverse[port.type] s = reg.get_port_spec_from_descriptor(d, port.name, port_type)
try: reg_spec = reg.get_port_spec_from_descriptor(d, function.name, 'input') except: raise UpgradeWorkflowError('cannot find function "%s" for' 'upgrade' % function.name) if reg_spec.sigstring != function.sigstring: raise UpgradeWorkflowError('mismatch on function "%s"' % \ function.name)
UpgradeWorkflowHandler.check_port_spec(invalid_module, function.name, 'input', d, function.sigstring)
def check_connection_port(port): try: port_type = PortSpec.port_type_map.inverse[port.type] s = reg.get_port_spec_from_descriptor(d, port.name, port_type)
output_port_spec = \ src_module.get_port_spec(src_port, 'output')
if ((src_port, 'output')) in local_port_specs: output_port_spec = local_port_specs[(src_port, 'output')] else: output_port_spec = \ src_module.get_port_spec(src_port, 'output')
def create_new_connection(src_module, src_port, dst_module, dst_port): # spec -> name, type, signature output_port_id = controller.id_scope.getNewId(Port.vtType) if type(src_port) == type(""): output_port_spec = \ src_module.get_port_spec(src_port, 'output') output_port = Port(id=output_port_id, spec=output_port_spec, moduleId=src_module.id, moduleName=src_module.name) else: output_port = Port(id=output_port_id, name=src_port.name, type=src_port.type, signature=src_port.signature, moduleId=src_module.id, moduleName=src_module.name)
input_port_spec = \ dst_module.get_port_spec(dst_port, 'input')
if ((dst_port, 'input')) in local_port_specs: input_port_spec = local_port_specs[(dst_port, 'input')] else: input_port_spec = \ dst_module.get_port_spec(dst_port, 'input')
def create_new_connection(src_module, src_port, dst_module, dst_port): # spec -> name, type, signature output_port_id = controller.id_scope.getNewId(Port.vtType) if type(src_port) == type(""): output_port_spec = \ src_module.get_port_spec(src_port, 'output') output_port = Port(id=output_port_id, spec=output_port_spec, moduleId=src_module.id, moduleName=src_module.name) else: output_port = Port(id=output_port_id, name=src_port.name, type=src_port.type, signature=src_port.signature, moduleId=src_module.id, moduleName=src_module.name)
self.setWindowTitle('VisTrails messages')
self.setWindowTitle('VisTrails Messages')
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'),
QtCore.SIGNAL('currentItemChanged(QListWidgetItem *, QListWidgetItem *)'),
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
self.msg_box = None self.resize(700, 400)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
buttons = QtGui.QHBoxLayout()
self.text = QtGui.QTextEdit() self.text.setReadOnly(True) self.text.hide() layout.addWidget(self.text) buttons = QtGui.QGridLayout()
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
close = QtGui.QPushButton('&Hide', self)
leftbuttons = QtGui.QGridLayout() buttons.addLayout(leftbuttons, 0, 0, QtCore.Qt.AlignLeft) rightbuttons = QtGui.QGridLayout() buttons.addLayout(rightbuttons, 0, 1, QtCore.Qt.AlignRight) close = QtGui.QPushButton('&Close', self)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
buttons.addWidget(close)
leftbuttons.addWidget(close, 0, 0)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
details = QtGui.QPushButton('&Show details', self) details.setFixedWidth(120) buttons.addWidget(details) self.connect(details, QtCore.SIGNAL('clicked()'), self.details)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
buttons.addWidget(copy)
rightbuttons.addWidget(copy, 0, 0)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
buttons.addWidget(copyAll)
rightbuttons.addWidget(copyAll, 0, 1)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
def details(self): """ call showMessage on selected message """ items = self.list.selectedItems() if len(items)>0: self.showMessage(items[0])
self.msg_box = None self.itemQueue = [] self.resize(700, 400)
def __init__(self, parent = None): QtGui.QDialog.__init__(self, parent) core.debug.DebugPrint.getInstance().set_stream(debugStream(self.write)) self.setWindowTitle('VisTrails messages') layout = QtGui.QVBoxLayout() self.setLayout(layout) self.list = QtGui.QListWidget() self.connect(self.list, QtCore.SIGNAL('itemDoubleClicked(QListWidgetItem *)'), self.showMessage) self.msg_box = None
def showMessage(self, item):
def showMessage(self, item, olditem):
def showMessage(self, item): """ show item data in a messagebox """ self.showMessageBox(str(item.data(32).toString()))
self.showMessageBox(str(item.data(32).toString()))
s = str(item.data(32).toString()) msgs = s.split('\n') msgs = [cgi.escape(i) for i in msgs] format = {'INFO': 'Message:', 'WARNING': 'Warning message:', 'CRITICAL': 'Critical message:'} text = '<HTML><BODY BGCOLOR=" text += '<H4>%s</H4>' % format.get(msgs[0], 'Message:') text += '<H4>%s<br></H4>' % msgs[3] text += '<table border="0">' if len(msgs)>4: text += '<tr><td>&nbsp;</td><td align=left>%s</td></tr>' % '<br>'.join(msgs[4:]) text += '<tr><td>&nbsp;</td><td>&nbsp;</td></tr>' text += '<tr><td align=right><b>Time:</b></td><td>%s</td></tr>' % msgs[1] text += '<tr><td align=right><b>Location:</b></td><td>%s</td></tr>' % msgs[2] text += '</table></BODY></HTML>' self.text.setHtml(text) self.text.show()
def showMessage(self, item): """ show item data in a messagebox """ self.showMessageBox(str(item.data(32).toString()))
def showMessageBox(self, s): s = str(s).strip()
def updateMessageBox(self, item): self.currentItem = item msg_box = self.msg_box s = str(item.data(32).toString())
def showMessageBox(self, s): s = str(s).strip() msgs = s.split('\n') if self.msg_box and self.msg_box.isVisible(): self.msg_box.close() msg_box = QtGui.QMessageBox(self.parent()) self.msg_box = msg_box if msgs[0] == "INFO": msg_box.setIcon(QtGui.QMessageBox.Information) msg_box.setWindowTitle("Information") elif msgs[0] == "WARNING": msg_box.setIcon(QtGui.QMessageBox.Warning) msg_box.setWindowTitle("Warning") elif msgs[0] == "CRITICAL": msg_box.setIcon(QtGui.QMessageBox.Critical) msg_box.setWindowTitle("Critical error") msg_box.setText(msgs[3]) text = "Time: %s\n Location: %s\n Message:\n%s" % \ (msgs[1], msgs[2], '\n'.join(msgs[3:])) msg_box.setInformativeText('\n'.join(msgs[4:])) msg_box.setStandardButtons(QtGui.QMessageBox.Ok) msg_box.setDefaultButton(QtGui.QMessageBox.Ok) msg_box.setDetailedText(text) msg_box.show()
if self.msg_box and self.msg_box.isVisible(): self.msg_box.close() msg_box = QtGui.QMessageBox(self.parent()) self.msg_box = msg_box
def showMessageBox(self, s): s = str(s).strip() msgs = s.split('\n') if self.msg_box and self.msg_box.isVisible(): self.msg_box.close() msg_box = QtGui.QMessageBox(self.parent()) self.msg_box = msg_box if msgs[0] == "INFO": msg_box.setIcon(QtGui.QMessageBox.Information) msg_box.setWindowTitle("Information") elif msgs[0] == "WARNING": msg_box.setIcon(QtGui.QMessageBox.Warning) msg_box.setWindowTitle("Warning") elif msgs[0] == "CRITICAL": msg_box.setIcon(QtGui.QMessageBox.Critical) msg_box.setWindowTitle("Critical error") msg_box.setText(msgs[3]) text = "Time: %s\n Location: %s\n Message:\n%s" % \ (msgs[1], msgs[2], '\n'.join(msgs[3:])) msg_box.setInformativeText('\n'.join(msgs[4:])) msg_box.setStandardButtons(QtGui.QMessageBox.Ok) msg_box.setDefaultButton(QtGui.QMessageBox.Ok) msg_box.setDetailedText(text) msg_box.show()
text = "Time: %s\n Location: %s\n Message:\n%s" % \ (msgs[1], msgs[2], '\n'.join(msgs[3:])) msg_box.setInformativeText('\n'.join(msgs[4:])) msg_box.setStandardButtons(QtGui.QMessageBox.Ok) msg_box.setDefaultButton(QtGui.QMessageBox.Ok) msg_box.setDetailedText(text) msg_box.show()
def showMessageBox(self, item): """ Displays the current message in a messagebox if a message is already shown the same message is shown again but with a "next message"-button """ msg_box = self.msg_box if not msg_box or not msg_box.isVisible(): self.old_msg_box = msg_box msg_box = QtGui.QMessageBox(self.parent()) self.msg_box = msg_box msg_box.setStandardButtons(QtGui.QMessageBox.Ok) msg_box.setDefaultButton(QtGui.QMessageBox.Ok) msg_box.setEscapeButton(QtGui.QMessageBox.Ok) msg_box.addButton('&Show Messages', msg_box.RejectRole) self.manyButton = None self.connect(msg_box, QtCore.SIGNAL('buttonClicked(QAbstractButton *)'), self.messageButtonClicked) self.updateMessageBox(item) else: self.itemQueue.append(item) if self.itemQueue: many = len(self.itemQueue) text = '&Next Message (%s more)' % many if not self.manyButton: self.manyButton=QtGui.QPushButton(text) msg_box.addButton(self.manyButton, msg_box.DestructiveRole) else: self.manyButton.setText(text) else: if self.manyButton: print "removing", self.manyButton msg_box.removeButton(self.manyButton) self.manyButton = None if not msg_box.isVisible(): msg_box.show() msg_box.resize(msg_box.sizeHint()) msg_box.updateGeometry() msg_box.activateWindow() msg_box.raise_() def messageButtonClicked(self, button): role = self.msg_box.buttonRole(button) if role == self.msg_box.RejectRole: self.itemQueue = [] self.show() self.list.setCurrentItem(self.currentItem) self.list.scrollToItem(self.currentItem) elif role == self.msg_box.DestructiveRole: item = self.itemQueue[0] del self.itemQueue[0] self.showMessageBox(item) else: self.itemQueue = []
def showMessageBox(self, s): s = str(s).strip() msgs = s.split('\n') if self.msg_box and self.msg_box.isVisible(): self.msg_box.close() msg_box = QtGui.QMessageBox(self.parent()) self.msg_box = msg_box if msgs[0] == "INFO": msg_box.setIcon(QtGui.QMessageBox.Information) msg_box.setWindowTitle("Information") elif msgs[0] == "WARNING": msg_box.setIcon(QtGui.QMessageBox.Warning) msg_box.setWindowTitle("Warning") elif msgs[0] == "CRITICAL": msg_box.setIcon(QtGui.QMessageBox.Critical) msg_box.setWindowTitle("Critical error") msg_box.setText(msgs[3]) text = "Time: %s\n Location: %s\n Message:\n%s" % \ (msgs[1], msgs[2], '\n'.join(msgs[3:])) msg_box.setInformativeText('\n'.join(msgs[4:])) msg_box.setStandardButtons(QtGui.QMessageBox.Ok) msg_box.setDefaultButton(QtGui.QMessageBox.Ok) msg_box.setDetailedText(text) msg_box.show()
if msgs[0] == "CRITICAL": self.showMessageBox(s)
def write(self, s):
count = self.list.count() if count: self.list.scrollToItem(self.list.item(count-1))
def showEvent(self, e): """closeEvent(e) -> None Event handler called when the dialog is about to close.""" count = self.list.count() if count: self.list.scrollToItem(self.list.item(count-1)) self.emit(QtCore.SIGNAL("messagesView(bool)"), True)
abstraction_files = [] thumbnail_files = [] vistrail = None if locator is None: vistrail = Vistrail()
from core.vistrail.vistrail import Vistrail abstraction_files = [] thumbnail_files = [] vistrail = None if locator is None: vistrail = Vistrail() else: res = locator.load() if type(res) == type(SaveBundle(None)): vistrail = res.vistrail abstraction_files.extend(res.abstractions) thumbnail_files.extend(res.thumbnails)
def load_vistrail(locator, is_abstraction=False): abstraction_files = [] thumbnail_files = [] vistrail = None if locator is None: vistrail = Vistrail() else: res = locator.load() if type(res) == type(SaveBundle(None)): vistrail = res.vistrail abstraction_files.extend(res.abstractions) thumbnail_files.extend(res.thumbnails) else: vistrail = res vistrail.is_abstraction = is_abstraction return (vistrail, abstraction_files, thumbnail_files)
res = locator.load() if type(res) == type(SaveBundle(None)): vistrail = res.vistrail abstraction_files.extend(res.abstractions) thumbnail_files.extend(res.thumbnails) else: vistrail = res vistrail.is_abstraction = is_abstraction return (vistrail, abstraction_files, thumbnail_files)
vistrail = res vistrail.is_abstraction = is_abstraction return (vistrail, abstraction_files, thumbnail_files)
def load_vistrail(locator, is_abstraction=False): abstraction_files = [] thumbnail_files = [] vistrail = None if locator is None: vistrail = Vistrail() else: res = locator.load() if type(res) == type(SaveBundle(None)): vistrail = res.vistrail abstraction_files.extend(res.abstractions) thumbnail_files.extend(res.thumbnails) else: vistrail = res vistrail.is_abstraction = is_abstraction return (vistrail, abstraction_files, thumbnail_files)
def is_image_stale(filename, host, port, db_name, vt_id): statinfo = os.stat(filename) image_time = datetime.fromtimestamp(statinfo.st_mtime) locator = DBLocator(host=host, port=int(port), database=db_name, user=db_read_user, passwd=db_read_pass, obj_id=int(vt_id), obj_type=None, connection_id=None) vt_mod_time = locator.get_db_modification_time() self.server_logger.info("image time: %s, vt time: %s"%(image_time, vt_mod_time)) if image_time < vt_mod_time: return True else: return False
def get_vt_graph_png(self, host, port, db_name, vt_id, is_local=True): """get_vt_graph_png(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
is_image_stale(filename, host, port, db_name, vt_id)) and
self._is_image_stale(filename, host, port, db_name, vt_id)) and
def is_image_stale(filename, host, port, db_name, vt_id): statinfo = os.stat(filename) image_time = datetime.fromtimestamp(statinfo.st_mtime) locator = DBLocator(host=host, port=int(port), database=db_name, user=db_read_user, passwd=db_read_pass, obj_id=int(vt_id), obj_type=None, connection_id=None) vt_mod_time = locator.get_db_modification_time() self.server_logger.info("image time: %s, vt time: %s"%(image_time, vt_mod_time)) if image_time < vt_mod_time: return True else: return False
is_image_stale(filename, host, port, db_name, vt_id)):
self._is_image_stale(filename, host, port, db_name, vt_id)):
def is_image_stale(filename, host, port, db_name, vt_id): statinfo = os.stat(filename) image_time = datetime.fromtimestamp(statinfo.st_mtime) locator = DBLocator(host=host, port=int(port), database=db_name, user=db_read_user, passwd=db_read_pass, obj_id=int(vt_id), obj_type=None, connection_id=None) vt_mod_time = locator.get_db_modification_time() self.server_logger.info("image time: %s, vt time: %s"%(image_time, vt_mod_time)) if image_time < vt_mod_time: return True else: return False
os.path.exists(filepath) and not os.path.exists(filename)) and self.proxies_queue is not None):
(os.path.exists(filepath) and not os.path.exists(filename)) or self._is_image_stale(filename, host, port, db_name, vt_id)) and self.proxies_queue is not None):
def get_vt_graph_pdf(self, host, port, db_name, vt_id, is_local=True): """get_vt_graph_pdf(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
if not os.path.exists(filepath):
if (not os.path.exists(filepath) or (os.path.exists(filepath) and not os.path.exists(filename)) or self._is_image_stale(filename, host, port, db_name, vt_id)): if os.path.exists(filepath): shutil.rmtree(filepath)
def get_vt_graph_pdf(self, host, port, db_name, vt_id, is_local=True): """get_vt_graph_pdf(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
if not os.path.exists(filename):
def get_vt_graph_pdf(self, host, port, db_name, vt_id, is_local=True): """get_vt_graph_pdf(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
if (e.modifiers() & QtCore.Qt.ControlModifier or e.modifiers() & QtCore.Qt.ShiftModifier): e.ignore()
if e.modifiers() & QtCore.Qt.ControlModifier: if key == QtCore.Qt.Key_C or key == QtCore.Qt.Key_Insert: self.copy() elif key == QtCore.Qt.Key_V: cursor = self.textCursor() cursor.movePosition(QtGui.QTextCursor.End) cursor.clearSelection() self.setTextCursor(cursor) self.paste() elif key == QtCore.Qt.Key_A: self.selectAll() self.selectMode = True else: e.ignore()
def keyPressEvent(self, e): """keyPressEvent(e) -> None Handle user input a key at a time.
def mousePressEvent(self, e):
    """Handle a mouse press in the shell.

    A left click switches the widget into selection mode; every press is
    then forwarded to the base QTextEdit handler.
    """
    left_click = (e.button() == QtCore.Qt.LeftButton)
    if left_click:
        self.selectMode = True
    QtGui.QTextEdit.mousePressEvent(self, e)
def focusNextPrevChild(self, next):
    """focusNextPrevChild(next) -> bool
    Suppress tabbing to the next window while a multi-line command is
    being entered (``self.more`` is True); otherwise defer to QTextEdit.
    """
    if next and self.more:
        # Return False (not the int 0) -- Qt expects a bool meaning
        # "focus was not moved"; the value is unchanged (False == 0).
        return False
    return QtGui.QTextEdit.focusNextPrevChild(self, next)
return
# def mousePressEvent(self, e):
self.flush_move_actions()
added_moves = self.flush_move_actions()
def flush_delayed_actions(self):
    """Replay every queued delayed action onto the vistrail.

    Actions are appended in order starting at ``self.current_version``;
    after each one, both the bookkeeping start version and
    ``self.current_version`` advance to the id of the action just added,
    so the actions form a linear chain.
    """
    start_version = self.current_version
    desc_key = Action.ANNOTATION_DESCRIPTION
    for action in self._delayed_actions:
        self.vistrail.add_action(action, start_version,
                                 self.current_session)
        # HACK to populate upgrade information
        # An action annotated as 'Upgrade' records, on the version it was
        # applied to, which action id upgraded it.
        if (action.has_annotation_with_key(desc_key) and
            action.get_annotation_by_key(desc_key).value == 'Upgrade'):
            self.vistrail.set_upgrade(start_version, str(action.id))
        self.current_version = action.id
        start_version = action.id
translate_dict = {'DBGroup': {'workflow': update_workflow}}
translate_dict = {'DBGroup': {'workflow': update_workflow}, 'DBAction': {'operations': update_operations}}
def update_workflow(old_obj, translate_dict):
    """Translate the workflow embedded in *old_obj* to the current
    DBWorkflow schema version and return the converted object.
    """
    converted = DBWorkflow()
    return DBWorkflow.update_version(old_obj.db_workflow,
                                     translate_dict,
                                     converted)
translate_dict = {'DBGroup': {'workflow': update_workflow}}
translate_dict = {'DBGroup': {'workflow': update_workflow}, 'DBWorkflow': {'modules': update_modules}}
def update_workflow(old_obj, translate_dict): return DBWorkflow.update_version(old_obj.db_workflow, translate_dict, DBWorkflow())
QtCore.SIGNAL('itemClicked(QListWidgetItem *)'), self.updateDBObjectsList) self.connect(self.connectionList,
def connectSignals(self):
    """ connectSignals() -> None
    Map signals between GUI components.  The wiring is expressed as a
    (sender, signal, slot) table and applied in order.
    """
    wiring = [
        (self.cancelButton, 'clicked()', self.reject),
        (self.openButton, 'clicked()', self.accept),
        (self.addAct, 'triggered()', self.showConnConfig),
        (self.removeAct, 'triggered()',
         self.connectionList.removeConnection),
        (self.connectionList, 'itemSelectionChanged()',
         self.updateDBObjectsList),
        (self.connectionList, 'itemSelectionChanged()',
         self.updateButtons),
        (self.connectionList, 'itemClicked(QListWidgetItem *)',
         self.updateDBObjectsList),
        (self.connectionList, 'reloadConnections',
         self.updateDBObjectsList),
        (self.objectList, 'itemSelectionChanged()',
         self.updateButtons),
        (self.saveasEdt, 'textChanged(QString)',
         self.updateButtons),
        (self.objectList, 'itemDoubleClicked(QListWidgetItem *)',
         self.accept),
    ]
    for sender, signal, slot in wiring:
        self.connect(sender, QtCore.SIGNAL(signal), slot)
app = gui.application.VistrailsApplication
def __init__(self, parent): QToolWindow.__init__(self, parent=parent) #locals() returns the original dictionary, not a copy as #the docs say app = gui.application.VistrailsApplication self.firstLocals = copy.copy(locals()) self.shell = QShell(self.firstLocals,None) self.setWidget(self.shell) self.setWindowTitle(self.shell.windowTitle()) self.monitorWindowTitle(self.shell) self.vistrails_interpreter = get_default_interpreter()
app = gui.application.VistrailsApplication shell_conf = app.configuration.shell
conf = get_vistrails_configuration() shell_conf = conf.shell
def __init__(self, locals=None, parent=None): """Constructor.
self.setCurrentFont(font)
self.setFont(font)
def __init__(self, locals=None, parent=None): """Constructor.
isoutdated = httpfile.is_outdated(remoteHeader, localFile)
isoutdated = httpfile._is_outdated(remoteHeader, localFile)
def verify_wsdl(wsdlList): """verify_wsdl(wsdlList: list of urls) -> (list,list,list) This checks for the wsdls that need to be updated or the files need to be generated and splits them in 3 lists: files that are outdated, updated and ones that an error was generated. """ outdated_list = [] updated_list = [] error_list = [] for w in wsdlList: if w == '': continue try: s = w.split('/') host = s[2] except: msg = "Malformed URL." error_list.append((w,msg)) continue location = w reader = WSDLTools.WSDLReader() load = reader.loadFromURL try: wsdl = load(location) except Exception, e: msg = "Couldn't load wsdl from the web: %s."%str(e) error_list.append((w,msg)) continue directoryname = urllib.quote_plus(w) directoryname = directoryname.replace(".","_") directoryname = directoryname.replace("%","_") directoryname = directoryname.replace("+","_") package_subdirectory = os.path.join(core.system.default_dot_vistrails(), "webServices", directoryname) wsm = WriteServiceModule(wsdl) client_mod = wsm.getClientModuleName() client_file = os.path.join(package_subdirectory, '%s.py' %client_mod) conn = httplib.HTTPConnection(host) filename = '/' + '/'.join(s[3:]) request = conn.request("GET", filename) response = conn.getresponse() remoteHeader = response.msg.getheader('last-modified') isoutdated = False if remoteHeader != None: localFile = client_file reg = core.modules.module_registry.get_module_registry() httpfile = reg.get_descriptor_by_name('edu.utah.sci.vistrails.http', 'HTTPFile').module() try: isoutdated = httpfile.is_outdated(remoteHeader, localFile) except OSError: print "File doesn't exist" isoutdated = True if isoutdated or remoteHeader == None: outdated_list.append(w) else: updated_list.append(w) return (outdated_list,updated_list, error_list)
qt_super(QVersionTreeScene, self).keyPressEvent(event)
else: qt_super(QVersionTreeScene, self).keyPressEvent(event)
def keyPressEvent(self, event):
    """ keyPressEvent(event: QKeyEvent) -> None
    Capture 'Del', 'Backspace' for pruning versions when not editing a
    tag; every other key goes to the base scene handler.
    """
    selectedItems = self.selectedItems()
    # Only version items whose tag editor does NOT have focus are
    # candidates for pruning.
    versions = [item.id for item in selectedItems
                if type(item) == QGraphicsVersionItem
                and not item.text.hasFocus()]
    if (self.controller and len(versions) > 0 and
            event.key() in [QtCore.Qt.Key_Backspace,
                            QtCore.Qt.Key_Delete]):
        # Fix: the original recomputed ``versions`` from ALL selected
        # items here, discarding the type/focus filter above and thereby
        # pruning (or crashing on) non-version items.  Keep the filtered
        # list.
        res = gui.utils.show_question(
            "VisTrails",
            "Are you sure that you want to "
            "prune the selected version(s)?",
            [gui.utils.YES_BUTTON, gui.utils.NO_BUTTON],
            gui.utils.NO_BUTTON)
        if res == gui.utils.YES_BUTTON:
            self.controller.prune_versions(versions)
    else:
        # Not handled here -- forward to the base scene only in this
        # case, instead of unconditionally re-dispatching a key we
        # already consumed.
        qt_super(QVersionTreeScene, self).keyPressEvent(event)
def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L):
def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L, pipeline=None):
def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L): """make_abstraction(name: str, vistrail: (str or Vistrail), registry: ModuleRegistry, vt_fname: str, internal_version: long) -> type Creates a new VisTrails module that is a subclass of Abstraction according to the vistrail file provided and the version. The version can either be a tag (string) or an id (long) """ if type(vistrail) == type(""): vt_fname = vistrail vistrail = read_vistrail(vistrail) elif vt_fname is None: raise VistrailsInternalError("Abstraction must provide " "vt_fname with vistrail") if internal_version == -1L: internal_version = vistrail.get_latest_version() action = vistrail.actionMap[internal_version] pipeline = vistrail.getPipeline(internal_version) # try to make the subworkflow work with the package versions we have pipeline.ensure_modules_are_on_registry() pipeline.ensure_connection_specs() uuid = vistrail.get_annotation('__abstraction_uuid__').value if action.notes is not None: docstring = action.notes else: docstring = None d = {} input_modules = [] output_modules = [] for module in pipeline.module_list: #FIXME make this compare more robust if module.name == 'InputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': input_modules.append(module) elif module.name == 'OutputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': output_modules.append(module) input_ports = [] output_ports = [] input_remap = {} output_remap = {} for module in input_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) input_ports.append((port_name, sigstring, optional)) input_remap[port_name] = module for module in output_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) output_ports.append((port_name, sigstring, optional)) output_remap[port_name] = module # necessary for group d['_input_ports'] = input_ports d['_output_ports'] = output_ports d['input_remap'] = input_remap d['output_remap'] = output_remap 
d['pipeline'] = pipeline # abstraction specific d['vt_fname'] = vt_fname d['vistrail'] = vistrail d['internal_version'] = internal_version d['uuid'] = uuid # print "input_ports", d['_input_ports'] # print "output_ports", d['_output_ports'] return new_module(Abstraction, name, d, docstring)
internal_version: long) -> type
internal_version: long, pipeline: Pipeline) -> type
def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L): """make_abstraction(name: str, vistrail: (str or Vistrail), registry: ModuleRegistry, vt_fname: str, internal_version: long) -> type Creates a new VisTrails module that is a subclass of Abstraction according to the vistrail file provided and the version. The version can either be a tag (string) or an id (long) """ if type(vistrail) == type(""): vt_fname = vistrail vistrail = read_vistrail(vistrail) elif vt_fname is None: raise VistrailsInternalError("Abstraction must provide " "vt_fname with vistrail") if internal_version == -1L: internal_version = vistrail.get_latest_version() action = vistrail.actionMap[internal_version] pipeline = vistrail.getPipeline(internal_version) # try to make the subworkflow work with the package versions we have pipeline.ensure_modules_are_on_registry() pipeline.ensure_connection_specs() uuid = vistrail.get_annotation('__abstraction_uuid__').value if action.notes is not None: docstring = action.notes else: docstring = None d = {} input_modules = [] output_modules = [] for module in pipeline.module_list: #FIXME make this compare more robust if module.name == 'InputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': input_modules.append(module) elif module.name == 'OutputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': output_modules.append(module) input_ports = [] output_ports = [] input_remap = {} output_remap = {} for module in input_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) input_ports.append((port_name, sigstring, optional)) input_remap[port_name] = module for module in output_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) output_ports.append((port_name, sigstring, optional)) output_remap[port_name] = module # necessary for group d['_input_ports'] = input_ports d['_output_ports'] = output_ports d['input_remap'] = input_remap d['output_remap'] = output_remap 
d['pipeline'] = pipeline # abstraction specific d['vt_fname'] = vt_fname d['vistrail'] = vistrail d['internal_version'] = internal_version d['uuid'] = uuid # print "input_ports", d['_input_ports'] # print "output_ports", d['_output_ports'] return new_module(Abstraction, name, d, docstring)
pipeline = vistrail.getPipeline(internal_version) pipeline.ensure_modules_are_on_registry() pipeline.ensure_connection_specs()
if pipeline is None: pipeline = vistrail.getPipeline(internal_version) pipeline.validate()
def new_abstraction(name, vistrail, vt_fname=None, internal_version=-1L): """make_abstraction(name: str, vistrail: (str or Vistrail), registry: ModuleRegistry, vt_fname: str, internal_version: long) -> type Creates a new VisTrails module that is a subclass of Abstraction according to the vistrail file provided and the version. The version can either be a tag (string) or an id (long) """ if type(vistrail) == type(""): vt_fname = vistrail vistrail = read_vistrail(vistrail) elif vt_fname is None: raise VistrailsInternalError("Abstraction must provide " "vt_fname with vistrail") if internal_version == -1L: internal_version = vistrail.get_latest_version() action = vistrail.actionMap[internal_version] pipeline = vistrail.getPipeline(internal_version) # try to make the subworkflow work with the package versions we have pipeline.ensure_modules_are_on_registry() pipeline.ensure_connection_specs() uuid = vistrail.get_annotation('__abstraction_uuid__').value if action.notes is not None: docstring = action.notes else: docstring = None d = {} input_modules = [] output_modules = [] for module in pipeline.module_list: #FIXME make this compare more robust if module.name == 'InputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': input_modules.append(module) elif module.name == 'OutputPort' and \ module.package == 'edu.utah.sci.vistrails.basic': output_modules.append(module) input_ports = [] output_ports = [] input_remap = {} output_remap = {} for module in input_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) input_ports.append((port_name, sigstring, optional)) input_remap[port_name] = module for module in output_modules: (port_name, sigstring, optional, _) = \ get_port_spec_info(pipeline, module) output_ports.append((port_name, sigstring, optional)) output_remap[port_name] = module # necessary for group d['_input_ports'] = input_ports d['_output_ports'] = output_ports d['input_remap'] = input_remap d['output_remap'] = output_remap 
d['pipeline'] = pipeline # abstraction specific d['vt_fname'] = vt_fname d['vistrail'] = vistrail d['internal_version'] = internal_version d['uuid'] = uuid # print "input_ports", d['_input_ports'] # print "output_ports", d['_output_ports'] return new_module(Abstraction, name, d, docstring)
repository_vt_id, repository_creator):
repository_vt_id, repository_creator, is_local=True):
def add_vt_to_db(self, host, port, db_name, user, vt_filepath, filename, repository_vt_id, repository_creator): """add_vt_to_db(host:str, port:int, db_name:str, user:str, vt_filepath:str(or datastream), filename:str, repository_vt_id:int, repository_creator:str) -> (return_status, int) This will add a vistrail in vt_filepath to the the database. If running on a remote machine, vt_filepath will contain vt file data stream. Before adding it it will annotate the vistrail with the repository_vt_id and repository_creator.
traceback.print_exc()
def add_vt_to_db(self, host, port, db_name, user, vt_filepath, filename, repository_vt_id, repository_creator): """add_vt_to_db(host:str, port:int, db_name:str, user:str, vt_filepath:str(or datastream), filename:str, repository_vt_id:int, repository_creator:str) -> (return_status, int) This will add a vistrail in vt_filepath to the the database. If running on a remote machine, vt_filepath will contain vt file data stream. Before adding it it will annotate the vistrail with the repository_vt_id and repository_creator.
old_db_vt_id):
old_db_vt_id, is_local=True):
def merge_vt(self, host, port, db_name, user, new_vt_filepath, old_db_vt_id): """ Merge new_vt (new_vt_filepath) with current vt (old_db_vt_id)