gc.rect(int(self.x+border_width/2.0), int(self.y+border_width/2.0), int(self.width-2*border_width+1), int(self.height-2*border_width+1))
gc.rect(self.x+border_width/2.0-0.5, self.y+border_width/2.0-0.5, self.width-border_width/2.0, self.height-border_width/2.0)
def _draw_inset_border(self, gc, view_bounds=None, mode="default"): """ Draws the border of a component. Unlike the default Enable border, this one is drawn on the inside of the plot instead of around it. """ if not self.border_visible: return border_width = self.border_width gc.save_state() gc.set_line_width(border_width) gc.set_line_dash(self.border_dash_) gc.set_stroke_color(self.border_color_) gc.begin_path() gc.set_antialias(0) gc.rect(int(self.x+border_width/2.0), int(self.y+border_width/2.0), int(self.width-2*border_width+1), int(self.height-2*border_width+1)) gc.stroke_path() gc.restore_state()
width+2*padding, height+2*padding)
width+2*padding + border_width, height+2*padding + border_width)
def _draw_mainlayer(self, gc, view_bounds=None, mode="default"): text_color = self.text_color_ highlight_color = self.highlight_color_ highlight_bgcolor = self.highlight_bgcolor_ padding = self.cell_padding
x,y = self._cached_cell_coords[i,j+1] + self._text_offset + padding
x,y = self._cached_cell_coords[i,j+1] + self._text_offset + \ padding + border_width/2.0
def _draw_mainlayer(self, gc, view_bounds=None, mode="default"): text_color = self.text_color_ highlight_color = self.highlight_color_ highlight_bgcolor = self.highlight_bgcolor_ padding = self.cell_padding
gc.move_to(x, self.y-self.cell_border_width/2.0) gc.line_to(x, self.y+self.height+self.cell_border_width/2.0)
gc.move_to(x, self.y) gc.line_to(x, self.y+self.height)
def _draw_grid_lines(self, gc): gc.set_stroke_color(self.cell_border_color_) gc.set_line_dash(self.cell_border_style_) gc.set_line_width(self.cell_border_width)
gc.move_to(self.x-self.cell_border_width/2.0, y) gc.line_to(self.x+self.width+self.cell_border_width/2.0, y)
gc.move_to(self.x, y) gc.line_to(self.x+self.width, y)
def _draw_grid_lines(self, gc): gc.set_stroke_color(self.cell_border_color_) gc.set_line_dash(self.cell_border_style_) gc.set_line_width(self.cell_border_width)
x_points = arange(numcols+1) * cell_width + self.x y_points = arange(numrows+1) * cell_height + self.y
x_points = arange(numcols+1) * cell_width + self.cell_border_width/2.0 + self.x y_points = arange(numrows+1) * cell_height + self.cell_border_width/2.0 + self.y
def _compute_positions(self):
self.bounds = [ cols * (width + margin), rows * (height + margin)]
self.bounds = [ cols * (width + margin) + self.cell_border_width, rows * (height + margin) + self.cell_border_width ]
def _update_bounds(self): if self.string_array is not None and len(self.string_array.shape) == 2: rows, cols = self.string_array.shape margin = 2*self.cell_padding + self.cell_border_width width, height = self._get_actual_cell_size() self.bounds = [ cols * (width + margin),# - self.cell_border_width, rows * (height + margin)]# - self.cell_border_width ]
if self.control == self.control.GetCapture(): self.control.SetFocus()
self.control.SetFocus()
def _set_focus ( self ): "Sets the keyboard focus to this window" if self.control == self.control.GetCapture(): self.control.SetFocus() return
def __init__(self, *args, **kwds): super(SuperTuple, self).__init__(*args, **kwds) for i, attr in enumerate(self.__class__.__names__):
def __new__(cls, *args, **kwds): self = tuple.__new__(cls, *args, **kwds) for i, attr in enumerate(cls.__names__):
def __init__(self, *args, **kwds): super(SuperTuple, self).__init__(*args, **kwds) for i, attr in enumerate(self.__class__.__names__): setattr(self, attr, self[i])
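The change above moves the attribute wiring from __init__ to __new__ because a tuple's contents are fixed before __init__ ever runs, so tuple subclasses must customize __new__. A minimal self-contained sketch of the pattern, using a hypothetical Point subclass:

class Point(tuple):
    __names__ = ('x', 'y')

    def __new__(cls, *args, **kwds):
        # tuple.__new__ receives the items; by the time __init__ runs,
        # the tuple contents can no longer be changed
        self = tuple.__new__(cls, *args, **kwds)
        for i, attr in enumerate(cls.__names__):
            setattr(self, attr, self[i])
        return self

p = Point((3, 4))
assert p.x == 3 and p[1] == 4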
file_inspector = Instance(FileInspector, args=())
filename = Str() line = Int(1) code = Str()
def getstats(cls, obj=None): self = cls(obj) return pstats.Stats(self)
tui.Item('main_results', show_label=False),
tui.UItem('main_results'),
def getstats(cls, obj=None): self = cls(obj) return pstats.Stats(self)
tui.Item('callee_results', show_label=False),
tui.UItem('callee_results'),
def getstats(cls, obj=None): self = cls(obj) return pstats.Stats(self)
tui.Item('caller_results', show_label=False), tui.Item('file_inspector', show_label=False),
tui.UItem('caller_results'), tui.UItem('filename', style='readonly'), tui.UItem('code', editor=tui.CodeEditor(line='line')),
def getstats(cls, obj=None): self = cls(obj) return pstats.Stats(self)
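getstats() above hands a profiler object straight to pstats.Stats, which accepts anything that can supply profile data. A standard-library-only sketch of that hand-off, assuming nothing beyond cProfile:

import cProfile
import pstats

profiler = cProfile.Profile()
profiler.enable()
sum(i * i for i in range(10000))  # the code under measurement
profiler.disable()

stats = pstats.Stats(profiler)    # same call shape as getstats()
stats.sort_stats('cumulative').print_stats(5)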
self.file_inspector.file_name = filename self.file_inspector.line = line else: self.file_inspector.file_name = '' self.file_inspector.line = 0 self.file_inspector.text = ''
with open(filename, 'rU') as f: code = f.read() self.code = code self.filename = filename self.line = line else: self.trait_set( code = '', filename = '', line = 1, )
def update_sub_results(self, new): if new is None: return self.caller_results.total_time = new.cum_time self.caller_results.records = new.callers self.callee_results._resort() self.caller_results.selected_record = self.caller_results.activated_record = None
""" Returns the width and height of the rendered text
""" Returns the bounding rect of the rendered text
def get_text_extent(self, text): """ Returns the width and height of the rendered text """ fm = self.gc.fontMetrics() rect = fm.boundingRect(text) return rect.width(), rect.height()
return rect.width(), rect.height()
return rect.left(), -fm.descent(), rect.right(), fm.height()
def get_text_extent(self, text): """ Returns the width and height of the rendered text """ fm = self.gc.fontMetrics() rect = fm.boundingRect(text) return rect.width(), rect.height()
fm = self.gc.fontMetrics() rect = fm.boundingRect(text) return rect.width(), rect.height(), -fm.descent(), fm.leading()
x1, y1, x2, y2 = self.get_text_extent(text) return x2, y2, y1, x1
def get_full_text_extent(self, text): """ Returns the width, height, descent and leading of the rendered text. """ fm = self.gc.fontMetrics() rect = fm.boundingRect(text) return rect.width(), rect.height(), -fm.descent(), fm.leading()
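get_full_text_extent above reads width, height, descent and leading from QFontMetrics; a standalone sketch of those calls (assuming PyQt5 here, whereas the original code took its metrics from an existing graphics context):

import sys
from PyQt5.QtGui import QFont, QFontMetrics
from PyQt5.QtWidgets import QApplication

app = QApplication(sys.argv)  # Qt's font machinery needs an application object
fm = QFontMetrics(QFont('Helvetica', 12))
rect = fm.boundingRect('hello')
# same ordering as the new return value above
print(rect.width(), rect.height(), -fm.descent(), fm.leading())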
""" PDF currently ignores the alpha value
"""
def set_fill_color(self,color): """ PDF currently ignores the alpha value """ r,g,b = color[:3] try: a = color[3] except IndexError: a = 1.0 self.gc.setFillColorRGB(r, g, b)
self.gc.setFillColorRGB(r, g, b)
self.gc.setFillColorRGB(r, g, b, a)
def set_fill_color(self,color): """ PDF currently ignores the alpha value """ r,g,b = color[:3] try: a = color[3] except IndexError: a = 1.0 self.gc.setFillColorRGB(r, g, b)
""" PDF currently ignores the alpha value
"""
def set_stroke_color(self,color): """ PDF currently ignores the alpha value """ r,g,b = color[:3] try: a = color[3] except IndexError: a = 1.0 self.gc.setStrokeColorRGB(r, g, b)
self.gc.setStrokeColorRGB(r, g, b)
self.gc.setStrokeColorRGB(r, g, b, a)
def set_stroke_color(self,color): """ PDF currently ignores the alpha value """ r,g,b = color[:3] try: a = color[3] except IndexError: a = 1.0 self.gc.setStrokeColorRGB(r, g, b)
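The fill and stroke changes above rely on ReportLab accepting an alpha argument on setFillColorRGB and setStrokeColorRGB, which is available in newer ReportLab releases. A minimal sketch against the plain pdfgen canvas:

from reportlab.pdfgen import canvas

c = canvas.Canvas('alpha_demo.pdf')
c.setFillColorRGB(1.0, 0.0, 0.0, 0.5)    # 50% translucent red fill
c.setStrokeColorRGB(0.0, 0.0, 1.0, 1.0)  # opaque blue stroke
c.rect(100, 100, 200, 100, stroke=1, fill=1)
c.save()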
msg = "set_alpha not implemented on PDF yet." raise NotImplementedError, msg
self.gc.setFillAlpha(alpha) self.gc.setStrokeAlpha(alpha) super(GraphicsContext, self).set_alpha(alpha)
def set_alpha(self, alpha): """ """ msg = "set_alpha not implemented on PDF yet." raise NotImplementedError, msg
import Image as PilImage
from reportlab.lib.utils import ImageReader from PIL import Image as PilImage
def draw_image(self, img, rect=None): """ draw_image(img_gc, rect=(x,y,w,h)) Draws another gc into this one. If 'rect' is not provided, then the image gc is drawn into this one, rooted at (0,0) and at full pixel size. If 'rect' is provided, then the image is resized into the (w,h) given and drawn into this GC at point (x,y). img_gc is either a Numeric array (WxHx3 or WxHx4) or a GC from Kiva's Agg backend (kiva.agg.GraphicsContextArray). Requires the Python Imaging Library (PIL). """ # We turn img into a PIL object, since that is what ReportLab # requires. To do this, we first determine if the input image # GC needs to be converted to RGBA/RGB. If so, we see if we can # do it nicely (using convert_pixel_format), and if not, we do # it brute-force using Agg. import Image as PilImage from enthought.kiva import agg
pil_img = PilImage.new(format, (converted_img.width(), converted_img.height()))
pil_img = PilImage.fromstring(format, (converted_img.width(), converted_img.height()), converted_img.bmp_array.tostring())
def draw_image(self, img, rect=None): """ draw_image(img_gc, rect=(x,y,w,h)) Draws another gc into this one. If 'rect' is not provided, then the image gc is drawn into this one, rooted at (0,0) and at full pixel size. If 'rect' is provided, then the image is resized into the (w,h) given and drawn into this GC at point (x,y). img_gc is either a Numeric array (WxHx3 or WxHx4) or a GC from Kiva's Agg backend (kiva.agg.GraphicsContextArray). Requires the Python Imaging Library (PIL). """ # We turn img into a PIL object, since that is what ReportLab # requires. To do this, we first determine if the input image # GC needs to be converted to RGBA/RGB. If so, we see if we can # do it nicely (using convert_pixel_format), and if not, we do # it brute-force using Agg. import Image as PilImage from enthought.kiva import agg
self.gc.drawImage(pil_img, rect[0], rect[1], rect[2], rect[3])
self.gc.drawImage(ImageReader(pil_img), rect[0], rect[1], rect[2], rect[3])
def draw_image(self, img, rect=None): """ draw_image(img_gc, rect=(x,y,w,h)) Draws another gc into this one. If 'rect' is not provided, then the image gc is drawn into this one, rooted at (0,0) and at full pixel size. If 'rect' is provided, then the image is resized into the (w,h) given and drawn into this GC at point (x,y). img_gc is either a Numeric array (WxHx3 or WxHx4) or a GC from Kiva's Agg backend (kiva.agg.GraphicsContextArray). Requires the Python Imaging Library (PIL). """ # We turn img into a PIL object, since that is what ReportLab # requires. To do this, we first determine if the input image # GC needs to be converted to RGBA/RGB. If so, we see if we can # do it nicely (using convert_pixel_format), and if not, we do # it brute-force using Agg. import Image as PilImage from enthought.kiva import agg
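The draw_image change swaps the direct PIL hand-off for reportlab.lib.utils.ImageReader. A self-contained sketch of that pipeline; Image.frombytes is the modern spelling of the fromstring call used above, and the output filename is a placeholder:

from PIL import Image
from reportlab.lib.utils import ImageReader
from reportlab.pdfgen import canvas

raw = bytes(bytearray(range(256))) * 3            # 16x16 RGB test pattern
pil_img = Image.frombytes('RGB', (16, 16), raw)   # was Image.fromstring
c = canvas.Canvas('image_demo.pdf')
c.drawImage(ImageReader(pil_img), 72, 72, width=144, height=144)
c.save()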
print "boo"
def save_state(self): """ Save the current graphics context's state. This should always be paired with a restore_state. """ self.gc.CGContextSaveGState() print "boo"
portal_url = getattr(site, "portal_url", None)
portal_url = getattr(site, "portal_url", None) request = getattr(site, "REQUEST", None)
def __init__(self, context, expose_schema=True): """ @param expose_schema: Map AT schema accessors directly to template variables for engines which cannot traverse Zope content (Cheetah). """ security=getSecurityManager() #portal_state = getMultiAdapter((context, context.REQUEST), name=u'plone_portal_state') try: portal_state = context.restrictedTraverse("@@plone_portal_state") except Unauthorized: # portal_state may be limited to admin users only portal_state = None except AttributeError: # traversal is not yet properly set up # may happen with some contexts, e.g. with LinguaPlone translate portal_state = None site = getSite() # Site might not have portal url when it is being duplicated through ZMI... # corner cases... you must love them! portal_url = getattr(site, "portal_url", None)
"request" : context.REQUEST,
"request" : request,
def __init__(self, context, expose_schema=True): """ @param expose_schema: Map AT schema accessors directly to template variables for engines which cannot traverse Zope content (Cheetah). """ security=getSecurityManager() #portal_state = getMultiAdapter((context, context.REQUEST), name=u'plone_portal_state') try: portal_state = context.restrictedTraverse("@@plone_portal_state") except Unauthorized: # portal_state may be limited to admin users only portal_state = None except AttributeError: # traversal is not yet properly set up # may happen with some contexts, e.g. with LinguaPlone translate portal_state = None site = getSite() # Site might not have portal url when it is being duplicated through ZMI... # corner cases... you must love them! portal_url = getattr(site, "portal_url", None)
DeckPercentageTrump({"test": ">=", "value": 33.33, "ratio": 0.8}),
DeckPercentageTrump({"test": ">=", "value": 100/3, "ratio": 0.8}),
def __init__(self): self.objects = [ DeckPercentageTrump({"test": ">=", "value": 33.33, "ratio": 0.8}), # the test does not work as intended
m = MP3(self.media) m.add_tags() m.tags['TIT2'] = id3.TIT2(encoding=2, text=u'text') m.save()
self.mp3.add_tags() self.mp3.tags['TIT2'] = id3.TIT2(encoding=2, text=u'text') self.mp3.save()
def write_tags(self): """Write all ID3v2.4 tags by mapping the dub2id3_dict dictionary onto the corresponding mutagen classes and methods"""
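write_tags() above sets raw ID3 frames; a minimal mutagen sketch of the same TIT2 call used in the snippets, where the file path is a placeholder and encoding=3 (UTF-8) is chosen here instead of the encoding=2 used above:

from mutagen.mp3 import MP3
from mutagen import id3

m = MP3('track.mp3')
if m.tags is None:
    m.add_tags()          # add_tags() raises if tags already exist
m.tags['TIT2'] = id3.TIT2(encoding=3, text=[u'My Title'])
m.save()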
self.metadata = self.get_file_metadata()
try: self.metadata = self.get_file_metadata() except: self.metadata = {'title': '', 'artist': '', 'album': '', 'date': '', 'comment': '', 'genre': '', 'copyright': '', }
def __init__(self, media): self.media = media self.item_id = '' self.source = self.media self.options = {} self.bitrate_default = '192' self.cache_dir = os.sep + 'tmp' self.keys2id3 = {'title': 'TIT2', 'artist': 'TPE1', 'album': 'TALB', 'date': 'TDRC', 'comment': 'COMM', 'genre': 'TCON', 'copyright': 'TCOP', } self.mp3 = MP3(self.media, ID3=EasyID3) self.info = self.mp3.info self.bitrate = int(str(self.info.bitrate)[:-3]) self.length = datetime.timedelta(0,self.info.length) self.metadata = self.get_file_metadata()
try: self.metadata = self.get_file_metadata() except: self.metadata = {'title': '', 'artist': '', 'album': '', 'date': '', 'comment': '', 'genre': '', 'copyright': '', }
self.metadata = self.get_file_metadata()
def __init__(self, media): self.media = media self.item_id = '' self.source = self.media self.options = {} self.bitrate_default = '192' self.cache_dir = os.sep + 'tmp' self.keys2id3 = {'title': 'TIT2', 'artist': 'TPE1', 'album': 'TALB', 'date': 'TDRC', 'comment': 'COMM', 'genre': 'TCON', 'copyright': 'TCOP', } self.mp3 = MP3(self.media, ID3=EasyID3) self.info = self.mp3.info self.bitrate = int(str(self.info.bitrate)[:-3]) self.length = datetime.timedelta(0,self.info.length) try: self.metadata = self.get_file_metadata() except: self.metadata = {'title': '', 'artist': '', 'album': '', 'date': '', 'comment': '', 'genre': '', 'copyright': '', } self.description = self.get_description() self.mime_type = self.get_mime_type() self.media_info = get_file_info(self.media) self.file_name = self.media_info[0] self.file_title = self.media_info[1] self.file_ext = self.media_info[2] self.extension = self.get_file_extension() self.size = os.path.getsize(media) #self.args = self.get_args()
self.mp3.close()
def get_file_metadata(self): metadata = {} for key in self.keys2id3.keys(): try: metadata[key] = self.mp3[key][0] except: metadata[key] = '' self.mp3.close() return metadata
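get_file_metadata() above reads tags through mutagen's EasyID3 mapping, where each key lookup returns a list and a missing tag raises KeyError. A minimal standalone sketch (the 'track.mp3' path is a placeholder):

from mutagen.mp3 import MP3
from mutagen.easyid3 import EasyID3

mp3 = MP3('track.mp3', ID3=EasyID3)
metadata = {}
for key in ('title', 'artist', 'album', 'date', 'genre'):
    try:
        metadata[key] = mp3[key][0]   # EasyID3 values are lists
    except KeyError:
        metadata[key] = ''
print(metadata)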
self.osc_controller.add_method('/relay', 'i', self.relay_callback)
self.osc_controller.add_method('/media/relay', 'i', self.relay_callback)
def __init__(self, station, q, logger, m3u): Thread.__init__(self) self.station = station self.q = q self.logger = logger self.channel = shout.Shout() self.id = 999999 self.counter = 0 self.command = 'cat ' self.delay = 0
if not os.path.exists()self.record_dir):
if not os.path.exists(self.record_dir):
def __init__(self, station, q, logger, m3u): Thread.__init__(self) self.station = station self.q = q self.logger = logger self.channel = shout.Shout() self.id = 999999 self.counter = 0 self.command = 'cat ' self.delay = 0
self.record_callback('/write', [1])
self.record_callback('/record', [1])
def __init__(self, station, q, logger, m3u): Thread.__init__(self) self.station = station self.q = q self.logger = logger self.channel = shout.Shout() self.id = 999999 self.counter = 0 self.command = 'cat ' self.delay = 0
message = 'New track ! %s %s on
message = '
def get_next_media(self): # Init playlist if self.lp != 0: old_playlist = self.playlist new_playlist = self.get_playlist() lp_new = len(new_playlist)
self.tinyurl = tinyurl.create_one(self.channel.url + '/m3u/' + self.m3u.split(os.sep)[-1])
def __init__(self, station, q, logger, m3u): Thread.__init__(self) self.station = station self.q = q self.logger = logger self.channel = shout.Shout() self.id = 999999 self.counter = 0 self.command = 'cat ' self.delay = 0
def __init__(self, username, password):
def __init__(self, access_token_key, access_token_secret):
def __init__(self, username, password): import twitter self.username = username self.password = password self.api = twitter.Api(username=self.username, password=self.password)
self.username = username self.password = password self.api = twitter.Api(username=self.username, password=self.password)
self.username = TWITTER_CONSUMER_KEY self.password = TWITTER_CONSUMER_SECRET self.access_token_key = access_token_key self.access_token_secret = access_token_secret self.api = twitter.Api(username=self.username, password=self.password, access_token_key=self.access_token_key, access_token_secret=self.access_token_secret)
def __init__(self, username, password): import twitter self.username = username self.password = password self.api = twitter.Api(username=self.username, password=self.password)
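The replacement __init__ above moves from basic-auth to OAuth credentials. With a current python-twitter, that construction is normally spelled with explicit consumer keys; all four values below are placeholders:

import twitter

api = twitter.Api(consumer_key='CONSUMER_KEY',
                  consumer_secret='CONSUMER_SECRET',
                  access_token_key='ACCESS_TOKEN_KEY',
                  access_token_secret='ACCESS_TOKEN_SECRET')
api.PostUpdate('hello from the stream')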
message = 'New track ! %s
artist_names = artist.split(' ') artist_tags = ' message = 'New track ! %s %s on
def get_next_media(self): # Init playlist if self.lp != 0: old_playlist = self.playlist new_playlist = self.get_playlist() lp_new = len(new_playlist)
def update_twitter(self): artist_names = self.artist.split(' ') artist_tags = ' #'.join(list(set(artist_names)-set(['&', '-']))) message = '♫ %s %s on #%s #%s' % (self.prefix, self.song, self.short_name, artist_tags)
def update_twitter(self, message=None): if not message: artist_names = self.artist.split(' ') artist_tags = ' #'.join(list(set(artist_names)-set(['&', '-']))) message = '♫ %s %s on #%s #%s' % (self.prefix, self.song, self.short_name, artist_tags)
def update_twitter(self): artist_names = self.artist.split(' ') artist_tags = ' #'.join(list(set(artist_names)-set(['&', '-']))) message = '♫ %s %s on #%s #%s' % (self.prefix, self.song, self.short_name, artist_tags) tags = '#' + ' #'.join(self.twitter_tags) message = message + ' ' + tags message = message[:113] + ' ' + self.tinyurl message = message.decode('utf8') self.logger.write('Twitting : "' + message + '"') self.twitter.post(message)
artist_names = self.artist.split(' ') artist_tags = ' #'.join(list(set(artist_names)-set(['&', '-']))) message = '♫ %s %s on #%s #%s' % (self.prefix, self.song, self.short_name, artist_tags) tags = '#' + ' #'.join(self.twitter_tags) message = message + ' ' + tags message = message[:107] + ' M3U : ' + self.m3u_tinyurl self.update_twitter(message) self.channel.set_metadata({'song': self.song, 'charset': 'utf8',})
self.update_twitter_current() self.channel.set_metadata({'song': self.song, 'charset': 'utf8',})
def run(self): while self.run_mode: self.q.get(1) self.next_media = 0 self.media = self.get_next_media() self.counter += 1
self.channel.close() self.channel.open()
try: self.channel.open() except: self.logger.write_error('Station ' + self.short_name + ' : could not connect to the server ') continue
def run(self): while True: self.q.get(1) self.next_media = 0 self.media = self.get_next_media() self.counter += 1
media.metadata = {'artist': self.artist, 'title': self.title, 'album': self.short_name, 'genre': self.channel.genre}
media.metadata = {'artist': self.artist.encode('utf-8'), 'title': self.title.encode('utf-8'), 'album': self.short_name.encode('utf-8'), 'genre': self.channel.genre.encode('utf-8')}
def record_callback(self, path, value): value = value[0] if value == 1: self.rec_file = self.short_name + '-' + \ datetime.datetime.now().strftime("%x-%X").replace('/', '_') + '.' + self.channel.format self.recorder = Recorder(self.record_dir) self.recorder.open(self.rec_file) elif value == 0: self.recorder.close() if self.channel.format == 'mp3': media = Mp3(self.record_dir + os.sep + self.rec_file) if self.channel.format == 'ogg': media = Ogg(self.record_dir + os.sep + self.rec_file) media.metadata = {'artist': self.artist, 'title': self.title, 'album': self.short_name, 'genre': self.channel.genre} media.write_tags() self.record_mode = value message = "Received OSC message '%s' with arguments '%d'" % (path, value) self.logger.write_info(message)
self.channel.set_metadata({'song': self.song, 'charset': 'utf8',})
self.channel.set_metadata({'song': self.song, 'charset': 'utf-8',})
def run(self): while self.run_mode: self.q.get(1) self.next_media = 0 self.media = self.get_next_media() self.counter += 1 if self.relay_mode: self.set_relay_mode() elif os.path.exists(self.media) and not os.sep+'.' in self.media: if self.lp == 0: self.logger.write_error('Station ' + self.short_name + ' has no media to stream !') break self.set_read_mode() self.q.task_done()
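run() above drives a libshout channel; the canonical python-shout sequence behind channel.open()/send()/sync()/close() looks roughly like this (host, password, mount and the input file are placeholders):

import shout

channel = shout.Shout()
channel.host = 'localhost'
channel.port = 8000
channel.password = 'hackme'
channel.mount = '/stream.mp3'
channel.format = 'mp3'
channel.open()
channel.set_metadata({'song': 'demo', 'charset': 'utf-8'})
with open('track.mp3', 'rb') as f:
    chunk = f.read(4096)
    while chunk:
        channel.send(chunk)
        channel.sync()        # pace writes to the stream bitrate
        chunk = f.read(4096)
channel.close()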
version='0.4.1',
version='0.5',
def _compile_po_files (self): data_files = []
condorSubmitFile.write('universe = ' + universe + '\n') condorSubmitFile.write('executable = ' + executable + '\n') condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n')
condorSubmitFile.write('universe = ' + universe + '\n' + 'executable = ' + executable + '\n' + 'transfer_executable = ' + transfer_executable + '\n' + 'when_to_transfer_output = ' + when_to_transfer_output + '\n' + 'Requirements = ' + requirements + '\n' + '+Owner = ' + owner + '\n' + 'log = ' + logfile + '\n' + 'output = ' + outputfile + '\n' + 'error = ' + errorfile + '\n' + 'notification = ' + notification + '\n' + '+IsSleep = 1\n')
def run(config): os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI' os.environ['X509_USER_PROXY']=config.proxyFile import glideKeeper import condorMonitor,condorManager gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature, config.runId, config.myClassadID, [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint, config.collectorNode, config.proxyFile) gktid.start() workingDir = os.getcwd() os.makedirs(workingDir + '/' + startTime) main_log_fname=workingDir + '/' + startTime + '/glideTester.log' main_log=open(main_log_fname,'w') try: # first load the file, so we check it is readable fd = open(config.params, 'r') try: lines = fd.readlines() finally: fd.close() # reset the values executable = None inputFile = None outputFile = None environment = None arguments = None concurrency = None runs = 1 # read the values for line in lines: line = line.strip() if line[0:1] in ('#',''): continue # ignore comments and empty lines arr = line.split('=',1) if len(arr) != 2: raise RuntimeError, 'Invalid parameter line, missing =: %s'%line key = arr[0].strip() val = arr[1].strip() if key == 'executable': if not os.path.exists(val): raise RuntimeError, "%s '%s' is not a valid executable"%(key,val) executable = val elif key == 'transfer_input_files': inputFile = val elif key == 'transfer_output_files': outputFile = val elif key == 'environment': environment = val elif key == 'arguments': arguments = val elif key == 'concurrency': concurrency=val elif key == 'runs': runs = int(val) concurrencyLevel = concurrency.split() # make sure all the needed values have been read, # and assign defaults, if needed universe = 'vanilla' if executable == None: raise RuntimeError, "executable was not defined!" transfer_executable = "True" when_to_transfer_output = "ON_EXIT" requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")' owner = 'Undefined' notification = 'Never' # Create a testing loop for each run for l in range(0, runs, 1): main_log.write("Iteration %i\n"%l) # Create a testing loop for each concurrency for k in range(0, len(concurrencyLevel), 1): main_log.write("Concurrency %i\n"%int(concurrencyLevel[k])) # request the glideins # we want 10% more glideins than the concurrency level requestedGlideins = int(concurrencyLevel[k]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins) gktid.request_glideins(totalGlideins) main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins)) # now we create the directories for each job and a submit file workingDir = os.getcwd() loop = 0 dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/' os.makedirs(dir1) logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' outputfile = 'concurrency_' + concurrencyLevel[k] + '.out' errorfile = 'concurrency_' + concurrencyLevel[k] + '.err' filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor' condorSubmitFile=open(filename, "w") condorSubmitFile.write('universe = ' + universe + '\n') condorSubmitFile.write('executable = ' + executable + '\n') condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n') if inputFile != None: condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n') if outputFile != None: condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n') if environment != None: condorSubmitFile.write('environment = ' + environment + '\n') 
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n') condorSubmitFile.write('Requirements = ' + requirements + '\n') condorSubmitFile.write('+Owner = ' + owner + '\n') condorSubmitFile.write('log = ' + logfile + '\n') condorSubmitFile.write('output = ' + outputfile + '\n') condorSubmitFile.write('error = ' + errorfile + '\n') condorSubmitFile.write('notification = ' + notification + '\n') condorSubmitFile.write('+IsSleep = 1\n') condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n') if arguments != None: condorSubmitFile.write('Arguments = ' + arguments + '\n') for j in range(0, int(concurrencyLevel[k]), 1): condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n') condorSubmitFile.write('Queue\n\n') loop = loop + 1 for i in range(0, int(concurrencyLevel[k]), 1): dir2 = dir1 + 'job' + str(i) + '/' os.makedirs(dir2) condorSubmitFile.close() # Need to figure out when we have all the glideins # Ask the glidekeeper object finished = "false" while finished != "true": numberGlideins = gktid.get_running_glideins() main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins)) main_log.flush() sleep(5) if numberGlideins >= requestedGlideins: finished = "true" # Now we begin submission and monitoring submission = condorManager.condorSubmitOne(filename) main_log.write("%s %s\n"%(ctime(), "file submitted")) shutil.move(filename, workingDir + '/' + startTime + '/' + filename) running = "true" while running != "false": check1 = condorMonitor.CondorQ() try: # i actually want to see all jos, not only running ones check1.load('JobStatus<3', []) data=check1.fetchStored() except: main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now")) main_log.flush() sleep(2) continue # retry the while loop main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running')) main_log.flush() if len(data.keys()) == 0: running = "false" main_log.write("%s %s\n"%(ctime(), "no more running jobs")) else: sleep(10) main_log.write("%s %s\n"%(ctime(), "Done")) finally: main_log.write("%s %s\n"%(ctime(), "getting out")) main_log.flush() gktid.soft_kill() gktid.join() return
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n') condorSubmitFile.write('Requirements = ' + requirements + '\n') condorSubmitFile.write('+Owner = ' + owner + '\n') condorSubmitFile.write('log = ' + logfile + '\n') condorSubmitFile.write('output = ' + outputfile + '\n') condorSubmitFile.write('error = ' + errorfile + '\n') condorSubmitFile.write('notification = ' + notification + '\n') condorSubmitFile.write('+IsSleep = 1\n') condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n')
def run(config): os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI' os.environ['X509_USER_PROXY']=config.proxyFile import glideKeeper import condorMonitor,condorManager gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature, config.runId, config.myClassadID, [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint, config.collectorNode, config.proxyFile) gktid.start() workingDir = os.getcwd() os.makedirs(workingDir + '/' + startTime) main_log_fname=workingDir + '/' + startTime + '/glideTester.log' main_log=open(main_log_fname,'w') try: # first load the file, so we check it is readable fd = open(config.params, 'r') try: lines = fd.readlines() finally: fd.close() # reset the values executable = None inputFile = None outputFile = None environment = None arguments = None concurrency = None runs = 1 # read the values for line in lines: line = line.strip() if line[0:1] in ('#',''): continue # ignore comments and empty lines arr = line.split('=',1) if len(arr) != 2: raise RuntimeError, 'Invalid parameter line, missing =: %s'%line key = arr[0].strip() val = arr[1].strip() if key == 'executable': if not os.path.exists(val): raise RuntimeError, "%s '%s' is not a valid executable"%(key,val) executable = val elif key == 'transfer_input_files': inputFile = val elif key == 'transfer_output_files': outputFile = val elif key == 'environment': environment = val elif key == 'arguments': arguments = val elif key == 'concurrency': concurrency=val elif key == 'runs': runs = int(val) concurrencyLevel = concurrency.split() # make sure all the needed values have been read, # and assign defaults, if needed universe = 'vanilla' if executable == None: raise RuntimeError, "executable was not defined!" transfer_executable = "True" when_to_transfer_output = "ON_EXIT" requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")' owner = 'Undefined' notification = 'Never' # Create a testing loop for each run for l in range(0, runs, 1): main_log.write("Iteration %i\n"%l) # Create a testing loop for each concurrency for k in range(0, len(concurrencyLevel), 1): main_log.write("Concurrency %i\n"%int(concurrencyLevel[k])) # request the glideins # we want 10% more glideins than the concurrency level requestedGlideins = int(concurrencyLevel[k]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins) gktid.request_glideins(totalGlideins) main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins)) # now we create the directories for each job and a submit file workingDir = os.getcwd() loop = 0 dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/' os.makedirs(dir1) logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' outputfile = 'concurrency_' + concurrencyLevel[k] + '.out' errorfile = 'concurrency_' + concurrencyLevel[k] + '.err' filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor' condorSubmitFile=open(filename, "w") condorSubmitFile.write('universe = ' + universe + '\n') condorSubmitFile.write('executable = ' + executable + '\n') condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n') if inputFile != None: condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n') if outputFile != None: condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n') if environment != None: condorSubmitFile.write('environment = ' + environment + '\n') 
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n') condorSubmitFile.write('Requirements = ' + requirements + '\n') condorSubmitFile.write('+Owner = ' + owner + '\n') condorSubmitFile.write('log = ' + logfile + '\n') condorSubmitFile.write('output = ' + outputfile + '\n') condorSubmitFile.write('error = ' + errorfile + '\n') condorSubmitFile.write('notification = ' + notification + '\n') condorSubmitFile.write('+IsSleep = 1\n') condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n') if arguments != None: condorSubmitFile.write('Arguments = ' + arguments + '\n') for j in range(0, int(concurrencyLevel[k]), 1): condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n') condorSubmitFile.write('Queue\n\n') loop = loop + 1 for i in range(0, int(concurrencyLevel[k]), 1): dir2 = dir1 + 'job' + str(i) + '/' os.makedirs(dir2) condorSubmitFile.close() # Need to figure out when we have all the glideins # Ask the glidekeeper object finished = "false" while finished != "true": numberGlideins = gktid.get_running_glideins() main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins)) main_log.flush() sleep(5) if numberGlideins >= requestedGlideins: finished = "true" # Now we begin submission and monitoring submission = condorManager.condorSubmitOne(filename) main_log.write("%s %s\n"%(ctime(), "file submitted")) shutil.move(filename, workingDir + '/' + startTime + '/' + filename) running = "true" while running != "false": check1 = condorMonitor.CondorQ() try: # i actually want to see all jos, not only running ones check1.load('JobStatus<3', []) data=check1.fetchStored() except: main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now")) main_log.flush() sleep(2) continue # retry the while loop main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running')) main_log.flush() if len(data.keys()) == 0: running = "false" main_log.write("%s %s\n"%(ctime(), "no more running jobs")) else: sleep(10) main_log.write("%s %s\n"%(ctime(), "Done")) finally: main_log.write("%s %s\n"%(ctime(), "getting out")) main_log.flush() gktid.soft_kill() gktid.join() return
summDir = workingDir + '/' + startTime + '/summaries/' os.makedirs(summDir) for l in range(0, runs, 1): for k in range(0, len(concurrencyLevel), 1): results=[] hours=[] minutes=[] seconds=[] jobStartInfo=[] jobExecuteInfo=[] jobFinishInfo=[] jobStatus=[] logFile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' lf = open(logFile, 'r') try: lines1 = lf.readlines() finally: lf.close() jobsSubmitted = 0 for line in lines1: line = line.strip() if line[0:1] not in ('0','1','2','3','4','5','6','7','8','9','('): continue arr1=line.split(' ',7) if arr1[5] == "Bytes" or arr1[4] =="Image": continue if arr1[5] == "submitted": jobNum = arr1[1].strip('()') jobStartInfo.append(jobNum) jobStartInfo.append(arr1[3]) jobsSubmitted=jobsSubmitted+1 if arr1[5] == "executing": jobNum = arr1[1].strip('()') jobExecuteInfo.append(jobNum) jobExecuteInfo.append(arr1[3]) if arr1[5] == "terminated.": jobNum = arr1[1].strip('()') jobFinishInfo.append(jobNum) jobFinishInfo.append(arr1[3]) if arr1[4] == "value": status=arr1[5].split(')',1) jobFinishInfo.append(status[0]) minExeTime=1e20 maxExeTime=0 minFinTime=1e20 maxFinTime=0 iter=0 for i in range(0, len(jobStartInfo), 2): if jobStartInfo[i] in jobExecuteInfo: index = jobExecuteInfo.index(jobStartInfo[i]) timeJobStart = jobStartInfo[i + 1] timeJobExecute = jobExecuteInfo[index + 1] timeStart = timeJobStart.split(':', 2) timeExecute = timeJobExecute.split(':', 2) diffHours = (int(timeExecute[0]) - int(timeStart[0])) * 3600 diffMinutes = (int(timeExecute[1]) - int(timeStart[1])) * 60 diffSeconds = int(timeExecute[2]) - int(timeStart[2]) executeTime = diffHours + diffMinutes + diffSeconds index2 = jobFinishInfo.index(jobStartInfo[i]) timeJobFinish = jobFinishInfo[index2 + 1] stat = jobFinishInfo[index2 +2] timeFinish = timeJobFinish.split(':', 2) diffHours2 = (int(timeFinish[0]) - int(timeExecute[0])) * 3600 diffMinutes2 = (int(timeFinish[1]) - int(timeExecute[1])) * 60 diffSeconds2 = int(timeFinish[2]) - int(timeExecute[2]) finishTime = diffHours2 + diffMinutes2 + diffSeconds2 resultData = [iter, executeTime, finishTime, stat] results.append(resultData) iter = iter + 1 if executeTime > maxExeTime: maxExeTime = executeTime if executeTime < minExeTime: minExeTime = executeTime if finishTime > maxFinTime: maxFinTime = finishTime if finishTime < minFinTime: minFinTime = finishTime filePath = summDir + 'con_' + concurrencyLevel[k] + '_run_' + str(l) + '.txt' file=open(filePath, 'w') header = "# Test Results for " + executable + " run at concurrency Level " + concurrencyLevel[k] + '\n\nJob#\tExecuteTime\tFinishTime\tReturnValue\n' file.write(header) exeTime=0 finTime=0 for i in range(0, int(concurrencyLevel[k])): exeTime = exeTime + results[i][1] finTime = finTime + results[i][2] writeData = str(results[i][0]) + '\t' + str(results[i][1]) + '\t\t' + str(results[i][2]) + '\t\t' + results[i][3] + '\n' file.write(writeData) aveExeTime = exeTime/int(concurrencyLevel[k]) aveFinTime = finTime/int(concurrencyLevel[k]) file.close() filepath = summDir + 'results.txt' file=open(filepath, 'a') times = "Concurrency Level = " + concurrencyLevel[k] + "\tExecute Time(Ave/Min/Max) = " + str(aveExeTime) + '/' + str(minExeTime) + '/' + str(maxExeTime) + "\tFinish Time(Ave/Min/Max) = " + str(aveFinTime) + "/" + str(minFinTime) + "/" + str(maxFinTime) + '\n' file.write(times) file.close()
def run(config): os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI' os.environ['X509_USER_PROXY']=config.proxyFile import glideKeeper import condorMonitor,condorManager gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature, config.runId, config.myClassadID, [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint, config.collectorNode, config.proxyFile) gktid.start() workingDir = os.getcwd() os.makedirs(workingDir + '/' + startTime) main_log_fname=workingDir + '/' + startTime + '/glideTester.log' main_log=open(main_log_fname,'w') try: # first load the file, so we check it is readable fd = open(config.params, 'r') try: lines = fd.readlines() finally: fd.close() # reset the values executable = None inputFile = None outputFile = None environment = None arguments = None concurrency = None runs = 1 # read the values for line in lines: line = line.strip() if line[0:1] in ('#',''): continue # ignore comments and empty lines arr = line.split('=',1) if len(arr) != 2: raise RuntimeError, 'Invalid parameter line, missing =: %s'%line key = arr[0].strip() val = arr[1].strip() if key == 'executable': if not os.path.exists(val): raise RuntimeError, "%s '%s' is not a valid executable"%(key,val) executable = val elif key == 'transfer_input_files': inputFile = val elif key == 'transfer_output_files': outputFile = val elif key == 'environment': environment = val elif key == 'arguments': arguments = val elif key == 'concurrency': concurrency=val elif key == 'runs': runs = int(val) concurrencyLevel = concurrency.split() # make sure all the needed values have been read, # and assign defaults, if needed universe = 'vanilla' if executable == None: raise RuntimeError, "executable was not defined!" transfer_executable = "True" when_to_transfer_output = "ON_EXIT" requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")' owner = 'Undefined' notification = 'Never' # Create a testing loop for each run for l in range(0, runs, 1): main_log.write("Iteration %i\n"%l) # Create a testing loop for each concurrency for k in range(0, len(concurrencyLevel), 1): main_log.write("Concurrency %i\n"%int(concurrencyLevel[k])) # request the glideins # we want 10% more glideins than the concurrency level requestedGlideins = int(concurrencyLevel[k]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins) gktid.request_glideins(totalGlideins) main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins)) # now we create the directories for each job and a submit file workingDir = os.getcwd() loop = 0 dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/' os.makedirs(dir1) logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' outputfile = 'concurrency_' + concurrencyLevel[k] + '.out' errorfile = 'concurrency_' + concurrencyLevel[k] + '.err' filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor' condorSubmitFile=open(filename, "w") condorSubmitFile.write('universe = ' + universe + '\n') condorSubmitFile.write('executable = ' + executable + '\n') condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n') if inputFile != None: condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n') if outputFile != None: condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n') if environment != None: condorSubmitFile.write('environment = ' + environment + '\n') 
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n') condorSubmitFile.write('Requirements = ' + requirements + '\n') condorSubmitFile.write('+Owner = ' + owner + '\n') condorSubmitFile.write('log = ' + logfile + '\n') condorSubmitFile.write('output = ' + outputfile + '\n') condorSubmitFile.write('error = ' + errorfile + '\n') condorSubmitFile.write('notification = ' + notification + '\n') condorSubmitFile.write('+IsSleep = 1\n') condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n') if arguments != None: condorSubmitFile.write('Arguments = ' + arguments + '\n') for j in range(0, int(concurrencyLevel[k]), 1): condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n') condorSubmitFile.write('Queue\n\n') loop = loop + 1 for i in range(0, int(concurrencyLevel[k]), 1): dir2 = dir1 + 'job' + str(i) + '/' os.makedirs(dir2) condorSubmitFile.close() # Need to figure out when we have all the glideins # Ask the glidekeeper object finished = "false" while finished != "true": numberGlideins = gktid.get_running_glideins() main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins)) main_log.flush() sleep(5) if numberGlideins >= requestedGlideins: finished = "true" # Now we begin submission and monitoring submission = condorManager.condorSubmitOne(filename) main_log.write("%s %s\n"%(ctime(), "file submitted")) shutil.move(filename, workingDir + '/' + startTime + '/' + filename) running = "true" while running != "false": check1 = condorMonitor.CondorQ() try: # i actually want to see all jos, not only running ones check1.load('JobStatus<3', []) data=check1.fetchStored() except: main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now")) main_log.flush() sleep(2) continue # retry the while loop main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running')) main_log.flush() if len(data.keys()) == 0: running = "false" main_log.write("%s %s\n"%(ctime(), "no more running jobs")) else: sleep(10) main_log.write("%s %s\n"%(ctime(), "Done")) finally: main_log.write("%s %s\n"%(ctime(), "getting out")) main_log.flush() gktid.soft_kill() gktid.join() return
self.add_dir_obj(cWDictFile.symlinkSupport(web_stage_dir,'web',work_dir))
self.add_dir_obj(cWDictFile.symlinkSupport(web_stage_dir,os.path.join(work_dir,'web'),"web"))
def __init__(self,work_dir, web_stage_dir=None): # if None, create a web subdir in the work_dir; someone else needs to copy it to the place visible by web_url if web_stage_dir==None: web_stage_dir=os.path.join(work_dir,'web') cvWDictFile.frontendMainDicts.__init__(self,work_dir,web_stage_dir, workdir_name="web",simple_work_dir=True,assume_groups=False) self.add_dir_obj(cWDictFile.symlinkSupport(web_stage_dir,'web',work_dir))
writeData = str(results[i][0]) + '\t' + str(results[i][1]) + '\t\t' + str(results[i][2]) + '\t\t' + results[i][3] + '\n'
writeData = str(results[i][0]) + '\t' + str(results[i][1]) + '\t' + str(results[i][2]) + '\t' + results[i][3] + '\n'
def run(config): os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI' os.environ['X509_USER_PROXY']=config.proxyFile import glideKeeper import condorMonitor,condorManager gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature, config.runId, config.myClassadID, [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint, config.collectorNode, config.proxyFile) gktid.start() workingDir = os.getcwd() os.makedirs(workingDir + '/' + startTime) main_log_fname=workingDir + '/' + startTime + '/glideTester.log' main_log=open(main_log_fname,'w') try: # first load the file, so we check it is readable fd = open(config.params, 'r') try: lines = fd.readlines() finally: fd.close() # reset the values executable = None inputFile = None outputFile = None environment = None arguments = None concurrency = None runs = 1 # read the values for line in lines: line = line.strip() if line[0:1] in ('#',''): continue # ignore comments and empty lines arr = line.split('=',1) if len(arr) != 2: raise RuntimeError, 'Invalid parameter line, missing =: %s'%line key = arr[0].strip() val = arr[1].strip() if key == 'executable': if not os.path.exists(val): raise RuntimeError, "%s '%s' is not a valid executable"%(key,val) executable = val elif key == 'transfer_input_files': inputFile = val elif key == 'transfer_output_files': outputFile = val elif key == 'environment': environment = val elif key == 'arguments': arguments = val elif key == 'concurrency': concurrency=val elif key == 'runs': runs = int(val) concurrencyLevel = concurrency.split() # make sure all the needed values have been read, # and assign defaults, if needed universe = 'vanilla' if executable == None: raise RuntimeError, "executable was not defined!" transfer_executable = "True" when_to_transfer_output = "ON_EXIT" requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")' owner = 'Undefined' notification = 'Never' # Create a testing loop for each run for l in range(0, runs, 1): main_log.write("Iteration %i\n"%l) # Create a testing loop for each concurrency for k in range(0, len(concurrencyLevel), 1): main_log.write("Concurrency %i\n"%int(concurrencyLevel[k])) # request the glideins # we want 10% more glideins than the concurrency level requestedGlideins = int(concurrencyLevel[k]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins) gktid.request_glideins(totalGlideins) main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins)) # now we create the directories for each job and a submit file workingDir = os.getcwd() loop = 0 dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/' os.makedirs(dir1) logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' outputfile = 'concurrency_' + concurrencyLevel[k] + '.out' errorfile = 'concurrency_' + concurrencyLevel[k] + '.err' filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor' condorSubmitFile=open(filename, "w") condorSubmitFile.write('universe = ' + universe + '\n' + 'executable = ' + executable + '\n' + 'transfer_executable = ' + transfer_executable + '\n' + 'when_to_transfer_output = ' + when_to_transfer_output + '\n' + 'Requirements = ' + requirements + '\n' + '+Owner = ' + owner + '\n' + 'log = ' + logfile + '\n' + 'output = ' + outputfile + '\n' + 'error = ' + errorfile + '\n' + 'notification = ' + notification + '\n' + '+IsSleep = 1\n') if inputFile != None: 
condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n') if outputFile != None: condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n') if environment != None: condorSubmitFile.write('environment = ' + environment + '\n') if arguments != None: condorSubmitFile.write('Arguments = ' + arguments + '\n') condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n') for j in range(0, int(concurrencyLevel[k]), 1): condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n') condorSubmitFile.write('Queue\n\n') loop = loop + 1 for i in range(0, int(concurrencyLevel[k]), 1): dir2 = dir1 + 'job' + str(i) + '/' os.makedirs(dir2) condorSubmitFile.close() # Need to figure out when we have all the glideins # Ask the glidekeeper object finished = "false" while finished != "true": numberGlideins = gktid.get_running_glideins() main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins)) main_log.flush() sleep(5) if numberGlideins >= requestedGlideins: finished = "true" # Now we begin submission and monitoring submission = condorManager.condorSubmitOne(filename) main_log.write("%s %s\n"%(ctime(), "file submitted")) shutil.move(filename, workingDir + '/' + startTime + '/' + filename) running = "true" while running != "false": check1 = condorMonitor.CondorQ() try: # i actually want to see all jos, not only running ones check1.load('JobStatus<3', []) data=check1.fetchStored() except: main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now")) main_log.flush() sleep(2) continue # retry the while loop main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running')) main_log.flush() if len(data.keys()) == 0: running = "false" main_log.write("%s %s\n"%(ctime(), "no more running jobs")) else: sleep(10) main_log.write("%s %s\n"%(ctime(), "Done")) # Now we parse the log files # Create a loop to parse each log file into a summaries directory summDir = workingDir + '/' + startTime + '/summaries/' os.makedirs(summDir) for l in range(0, runs, 1): for k in range(0, len(concurrencyLevel), 1): # Initialize empty arrays for data results=[] hours=[] minutes=[] seconds=[] jobStartInfo=[] jobExecuteInfo=[] jobFinishInfo=[] jobStatus=[] # Parse each log file logFile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' lf = open(logFile, 'r') try: lines1 = lf.readlines() finally: lf.close() jobsSubmitted = 0 for line in lines1: line = line.strip() if line[0:1] not in ('0','1','2','3','4','5','6','7','8','9','('): continue # ignore unwanted text lines arr1=line.split(' ',7) if arr1[5] == "Bytes" or arr1[4] =="Image": continue if arr1[5] == "submitted": jobNum = arr1[1].strip('()') jobStartInfo.append(jobNum) jobStartInfo.append(arr1[3]) jobsSubmitted=jobsSubmitted+1 if arr1[5] == "executing": jobNum = arr1[1].strip('()') jobExecuteInfo.append(jobNum) jobExecuteInfo.append(arr1[3]) if arr1[5] == "terminated.": jobNum = arr1[1].strip('()') jobFinishInfo.append(jobNum) jobFinishInfo.append(arr1[3]) if arr1[4] == "value": status=arr1[5].split(')',1) jobFinishInfo.append(status[0]) # Set some variables minExeTime=1e20 maxExeTime=0 minFinTime=1e20 maxFinTime=0 iter=0 for i in range(0, len(jobStartInfo), 2): if jobStartInfo[i] in jobExecuteInfo: index = jobExecuteInfo.index(jobStartInfo[i]) timeJobStart = jobStartInfo[i + 1] timeJobExecute = jobExecuteInfo[index + 1] timeStart = timeJobStart.split(':', 2) timeExecute = timeJobExecute.split(':', 2) diffHours = (int(timeExecute[0]) - 
int(timeStart[0])) * 3600 diffMinutes = (int(timeExecute[1]) - int(timeStart[1])) * 60 diffSeconds = int(timeExecute[2]) - int(timeStart[2]) executeTime = diffHours + diffMinutes + diffSeconds index2 = jobFinishInfo.index(jobStartInfo[i]) timeJobFinish = jobFinishInfo[index2 + 1] stat = jobFinishInfo[index2 +2] timeFinish = timeJobFinish.split(':', 2) diffHours2 = (int(timeFinish[0]) - int(timeExecute[0])) * 3600 diffMinutes2 = (int(timeFinish[1]) - int(timeExecute[1])) * 60 diffSeconds2 = int(timeFinish[2]) - int(timeExecute[2]) finishTime = diffHours2 + diffMinutes2 + diffSeconds2 resultData = [iter, executeTime, finishTime, stat] results.append(resultData) iter = iter + 1 if executeTime > maxExeTime: maxExeTime = executeTime if executeTime < minExeTime: minExeTime = executeTime if finishTime > maxFinTime: maxFinTime = finishTime if finishTime < minFinTime: minFinTime = finishTime # Create summary directory structure filePath = summDir + 'con_' + concurrencyLevel[k] + '_run_' + str(l) + '.txt' file=open(filePath, 'w') header = "# Test Results for " + executable + " run at concurrency Level " + concurrencyLevel[k] + '\n\nJob#\tExecuteTime\tFinishTime\tReturnValue\n' file.write(header) exeTime=0 finTime=0 for i in range(0, int(concurrencyLevel[k])): exeTime = exeTime + results[i][1] finTime = finTime + results[i][2] writeData = str(results[i][0]) + '\t' + str(results[i][1]) + '\t\t' + str(results[i][2]) + '\t\t' + results[i][3] + '\n' file.write(writeData) aveExeTime = exeTime/int(concurrencyLevel[k]) aveFinTime = finTime/int(concurrencyLevel[k]) file.close() filepath = summDir + 'results.txt' file=open(filepath, 'a') times = "Concurrency Level = " + concurrencyLevel[k] + "\tExecute Time(Ave/Min/Max) = " + str(aveExeTime) + '/' + str(minExeTime) + '/' + str(maxExeTime) + "\tFinish Time(Ave/Min/Max) = " + str(aveFinTime) + "/" + str(minFinTime) + "/" + str(maxFinTime) + '\n' file.write(times) file.close() finally: main_log.write("%s %s\n"%(ctime(), "getting out")) main_log.flush() gktid.soft_kill() gktid.join() return
times = "Concurrency Level = " + concurrencyLevel[k] + "\tExecute Time(Ave/Min/Max) = " + str(aveExeTime) + '/' + str(minExeTime) + '/' + str(maxExeTime) + "\tFinish Time(Ave/Min/Max) = " + str(aveFinTime) + "/" + str(minFinTime) + "/" + str(maxFinTime) + '\n'
times = "Concurrency_Level = " + concurrencyLevel[k] + "\t Execute_Time_(Ave/Min/Max) = " + str(aveExeTime) + '/' + str(minExeTime) + '/' + str(maxExeTime) + "\t Finish_Time_(Ave/Min/Max) = " + str(aveFinTime) + "/" + str(minFinTime) + "/" + str(maxFinTime) + '\n'
def run(config): os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI' os.environ['X509_USER_PROXY']=config.proxyFile import glideKeeper import condorMonitor,condorManager gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature, config.runId, config.myClassadID, [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint, config.collectorNode, config.proxyFile) gktid.start() workingDir = os.getcwd() os.makedirs(workingDir + '/' + startTime) main_log_fname=workingDir + '/' + startTime + '/glideTester.log' main_log=open(main_log_fname,'w') try: # first load the file, so we check it is readable fd = open(config.params, 'r') try: lines = fd.readlines() finally: fd.close() # reset the values executable = None inputFile = None outputFile = None environment = None arguments = None concurrency = None runs = 1 # read the values for line in lines: line = line.strip() if line[0:1] in ('#',''): continue # ignore comments and empty lines arr = line.split('=',1) if len(arr) != 2: raise RuntimeError, 'Invalid parameter line, missing =: %s'%line key = arr[0].strip() val = arr[1].strip() if key == 'executable': if not os.path.exists(val): raise RuntimeError, "%s '%s' is not a valid executable"%(key,val) executable = val elif key == 'transfer_input_files': inputFile = val elif key == 'transfer_output_files': outputFile = val elif key == 'environment': environment = val elif key == 'arguments': arguments = val elif key == 'concurrency': concurrency=val elif key == 'runs': runs = int(val) concurrencyLevel = concurrency.split() # make sure all the needed values have been read, # and assign defaults, if needed universe = 'vanilla' if executable == None: raise RuntimeError, "executable was not defined!" transfer_executable = "True" when_to_transfer_output = "ON_EXIT" requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")' owner = 'Undefined' notification = 'Never' # Create a testing loop for each run for l in range(0, runs, 1): main_log.write("Iteration %i\n"%l) # Create a testing loop for each concurrency for k in range(0, len(concurrencyLevel), 1): main_log.write("Concurrency %i\n"%int(concurrencyLevel[k])) # request the glideins # we want 10% more glideins than the concurrency level requestedGlideins = int(concurrencyLevel[k]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins) gktid.request_glideins(totalGlideins) main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins)) # now we create the directories for each job and a submit file workingDir = os.getcwd() loop = 0 dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/' os.makedirs(dir1) logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log' outputfile = 'concurrency_' + concurrencyLevel[k] + '.out' errorfile = 'concurrency_' + concurrencyLevel[k] + '.err' filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor' condorSubmitFile=open(filename, "w") condorSubmitFile.write('universe = ' + universe + '\n' + 'executable = ' + executable + '\n' + 'transfer_executable = ' + transfer_executable + '\n' + 'when_to_transfer_output = ' + when_to_transfer_output + '\n' + 'Requirements = ' + requirements + '\n' + '+Owner = ' + owner + '\n' + 'log = ' + logfile + '\n' + 'output = ' + outputfile + '\n' + 'error = ' + errorfile + '\n' + 'notification = ' + notification + '\n' + '+IsSleep = 1\n') if inputFile != None: 
                    condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n')
                if outputFile != None:
                    condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n')
                if environment != None:
                    condorSubmitFile.write('environment = ' + environment + '\n')
                if arguments != None:
                    condorSubmitFile.write('Arguments = ' + arguments + '\n')
                condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n')
                for j in range(0, int(concurrencyLevel[k]), 1):
                    condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n')
                    condorSubmitFile.write('Queue\n\n')
                    loop = loop + 1
                for i in range(0, int(concurrencyLevel[k]), 1):
                    dir2 = dir1 + 'job' + str(i) + '/'
                    os.makedirs(dir2)
                condorSubmitFile.close()
                # Need to figure out when we have all the glideins
                # Ask the glidekeeper object
                finished = "false"
                while finished != "true":
                    numberGlideins = gktid.get_running_glideins()
                    main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins))
                    main_log.flush()
                    sleep(5)
                    if numberGlideins >= requestedGlideins:
                        finished = "true"
                # Now we begin submission and monitoring
                submission = condorManager.condorSubmitOne(filename)
                main_log.write("%s %s\n"%(ctime(), "file submitted"))
                shutil.move(filename, workingDir + '/' + startTime + '/' + filename)
                running = "true"
                while running != "false":
                    check1 = condorMonitor.CondorQ()
                    try:
                        # i actually want to see all jobs, not only running ones
                        check1.load('JobStatus<3', [])
                        data=check1.fetchStored()
                    except:
                        main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now"))
                        main_log.flush()
                        sleep(2)
                        continue # retry the while loop
                    main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running'))
                    main_log.flush()
                    if len(data.keys()) == 0:
                        running = "false"
                        main_log.write("%s %s\n"%(ctime(), "no more running jobs"))
                    else:
                        sleep(10)
                main_log.write("%s %s\n"%(ctime(), "Done"))
        # Now we parse the log files
        # Create a loop to parse each log file into a summaries directory
        summDir = workingDir + '/' + startTime + '/summaries/'
        os.makedirs(summDir)
        for l in range(0, runs, 1):
            for k in range(0, len(concurrencyLevel), 1):
                # Initialize empty arrays for data
                results=[]
                hours=[]
                minutes=[]
                seconds=[]
                jobStartInfo=[]
                jobExecuteInfo=[]
                jobFinishInfo=[]
                jobStatus=[]
                # Parse each log file
                logFile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log'
                lf = open(logFile, 'r')
                try:
                    lines1 = lf.readlines()
                finally:
                    lf.close()
                jobsSubmitted = 0
                for line in lines1:
                    line = line.strip()
                    if line[0:1] not in ('0','1','2','3','4','5','6','7','8','9','('):
                        continue # ignore unwanted text lines
                    arr1=line.split(' ',7)
                    if arr1[5] == "Bytes" or arr1[4] =="Image":
                        continue
                    if arr1[5] == "submitted":
                        jobNum = arr1[1].strip('()')
                        jobStartInfo.append(jobNum)
                        jobStartInfo.append(arr1[3])
                        jobsSubmitted=jobsSubmitted+1
                    if arr1[5] == "executing":
                        jobNum = arr1[1].strip('()')
                        jobExecuteInfo.append(jobNum)
                        jobExecuteInfo.append(arr1[3])
                    if arr1[5] == "terminated.":
                        jobNum = arr1[1].strip('()')
                        jobFinishInfo.append(jobNum)
                        jobFinishInfo.append(arr1[3])
                    if arr1[4] == "value":
                        status=arr1[5].split(')',1)
                        jobFinishInfo.append(status[0])
                # Set some variables
                minExeTime=1e20
                maxExeTime=0
                minFinTime=1e20
                maxFinTime=0
                iter=0
                for i in range(0, len(jobStartInfo), 2):
                    if jobStartInfo[i] in jobExecuteInfo:
                        index = jobExecuteInfo.index(jobStartInfo[i])
                        timeJobStart = jobStartInfo[i + 1]
                        timeJobExecute = jobExecuteInfo[index + 1]
                        timeStart = timeJobStart.split(':', 2)
                        timeExecute = timeJobExecute.split(':', 2)
                        diffHours = (int(timeExecute[0]) - int(timeStart[0])) * 3600
                        diffMinutes = (int(timeExecute[1]) - int(timeStart[1])) * 60
                        diffSeconds = int(timeExecute[2]) - int(timeStart[2])
                        executeTime = diffHours + diffMinutes + diffSeconds
                        index2 = jobFinishInfo.index(jobStartInfo[i])
                        timeJobFinish = jobFinishInfo[index2 + 1]
                        stat = jobFinishInfo[index2 +2]
                        timeFinish = timeJobFinish.split(':', 2)
                        diffHours2 = (int(timeFinish[0]) - int(timeExecute[0])) * 3600
                        diffMinutes2 = (int(timeFinish[1]) - int(timeExecute[1])) * 60
                        diffSeconds2 = int(timeFinish[2]) - int(timeExecute[2])
                        finishTime = diffHours2 + diffMinutes2 + diffSeconds2
                        resultData = [iter, executeTime, finishTime, stat]
                        results.append(resultData)
                        iter = iter + 1
                        if executeTime > maxExeTime:
                            maxExeTime = executeTime
                        if executeTime < minExeTime:
                            minExeTime = executeTime
                        if finishTime > maxFinTime:
                            maxFinTime = finishTime
                        if finishTime < minFinTime:
                            minFinTime = finishTime
                # Create summary directory structure
                filePath = summDir + 'con_' + concurrencyLevel[k] + '_run_' + str(l) + '.txt'
                file=open(filePath, 'w')
                header = "# Test Results for " + executable + " run at concurrency Level " + concurrencyLevel[k] + '\n\nJob#\tExecuteTime\tFinishTime\tReturnValue\n'
                file.write(header)
                exeTime=0
                finTime=0
                for i in range(0, int(concurrencyLevel[k])):
                    exeTime = exeTime + results[i][1]
                    finTime = finTime + results[i][2]
                    writeData = str(results[i][0]) + '\t' + str(results[i][1]) + '\t\t' + str(results[i][2]) + '\t\t' + results[i][3] + '\n'
                    file.write(writeData)
                aveExeTime = exeTime/int(concurrencyLevel[k])
                aveFinTime = finTime/int(concurrencyLevel[k])
                file.close()
                filepath = summDir + 'results.txt'
                file=open(filepath, 'a')
                times = "Concurrency Level = " + concurrencyLevel[k] + "\tExecute Time(Ave/Min/Max) = " + str(aveExeTime) + '/' + str(minExeTime) + '/' + str(maxExeTime) + "\tFinish Time(Ave/Min/Max) = " + str(aveFinTime) + "/" + str(minFinTime) + "/" + str(maxFinTime) + '\n'
                file.write(times)
                file.close()
    finally:
        main_log.write("%s %s\n"%(ctime(), "getting out"))
        main_log.flush()
        gktid.soft_kill()
        gktid.join()
    return
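The timing arithmetic in the parser above subtracts the hour, minute, and second fields of two wall-clock stamps pairwise. A hedged, self-contained restatement of that step (hypothetical helper name; like the original, it goes negative if a job crosses midnight):

    def elapsed_seconds(t_start, t_end):
        # 'HH:MM:SS' strings as they appear in the Condor user log
        h1, m1, s1 = [int(x) for x in t_start.split(':', 2)]
        h2, m2, s2 = [int(x) for x in t_end.split(':', 2)]
        return (h2 - h1) * 3600 + (m2 - m1) * 60 + (s2 - s1)

    print(elapsed_seconds('12:00:05', '12:01:00'))  # 55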
FILE.write('executable=' + executable' '\n')
FILE.write('executable=' + executable + '\n')
def run(config):
    import glideKeeper
    gktid=glideKeeper.glideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
                                        config.runId,
                                        config.gfactoryClassadID,
                                        [config.gfactoryNode],config.gFactoryConstraint,
                                        config.proxyFile)
    gktid.start()
    try:
        # most of the code goes here
        # first load the file, so we check it is readable
        fd=open('parameters.cfg', 'r')
        try:
            lines=fd.readlines()
        finally:
            fd.close()
        # reset the values
        executable=None
        arguments=None
        concurrency=None
        owner=None
        # read the values
        for line in lines:
            line=line.strip()
            if line[0:1] in ('#',''):
                continue # ignore comments and empty lines
            arr=line.split('=',1)
            if len(arr)!=2:
                raise RuntimeError, 'Invalid parameter line, missing =: %s'%line
            key=arr[0].strip()
            val=arr[1].strip()
            if key=='executable':
                if not os.path.exists(val):
                    raise RuntimeError, "%s '%s' is not a valid executable"%(key,val)
                executable=val
            elif key=='owner':
                owner=val
            elif key=='arguments':
                arguments=val
            elif key='concurrency':
                concurrency=val
        concurrencyLevel=concurrency.split()
        # make sure all the needed values have been read,
        # and assign defaults, if needed
        universe='vanilla'
        if executable==None:
            raise RuntimeError, "executable was not defined!"
            executable=raw_input("Enter executable: ");
        transfer_executable="True"
        when_to_transfer_output="ON_EXIT"
        requirements='(GLIDEIN_Site =!= "UCSD12") && (Arch=!="abc")'
        if owner==None:
            owner='Undefined'
        notification='Never'
        # Create a testing loop for each concurrency
        for i in range(0, len(concurrencyLevel), 1):
            # request the glideins
            # we want 10% more glideins than the concurrency level
            totalGlideins=int(int(concurrencyLevel[i]) + .1 * int(concurrencyLevel[i]))
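The pair above fixes a dropped '+' between two string expressions, which Python rejects outright. As an aside (not part of the dataset), building the whole line with a single format expression avoids this class of slip:

    executable = '/bin/sleep'  # hypothetical value, for illustration only
    line = 'executable=%s\n' % executable
    assert line == 'executable=' + executable + '\n'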
totalGlideins = int(int(concurrencyLevel[i]) + .1 * int(concurrencyLevel[i]))
requestedGlideins = int(concurrencyLevel[i]) totalGlideins = int(requestedGlideins + .1 * requestedGlideins))
def run(config):
    import glideKeeper
    gktid=glideKeeper.glideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
                                        config.runId,
                                        config.gfactoryClassadID,
                                        [config.gfactoryNode],config.gFactoryConstraint,
                                        config.proxyFile)
    gktid.start()
    try:
        # most of the code goes here
        # first load the file, so we check it is readable
        fd = open('parameters.cfg', 'r')
        try:
            lines = fd.readlines()
        finally:
            fd.close()
        # reset the values
        executable = None
        arguments = None
        concurrency = None
        owner = None
        # read the values
        for line in lines:
            line = line.strip()
            if line[0:1] in ('#',''):
                continue # ignore comments and empty lines
            arr = line.split('=',1)
            if len(arr) != 2:
                raise RuntimeError, 'Invalid parameter line, missing =: %s'%line
            key = arr[0].strip()
            val = arr[1].strip()
            if key == 'executable':
                if not os.path.exists(val):
                    raise RuntimeError, "%s '%s' is not a valid executable"%(key,val)
                executable = val
            elif key == 'owner':
                owner=val
            elif key == 'arguments':
                arguments = val
            elif key == 'concurrency':
                concurrency=val
        concurrencyLevel = concurrency.split()
        # make sure all the needed values have been read,
        # and assign defaults, if needed
        universe = 'vanilla'
        if executable == None:
            raise RuntimeError, "executable was not defined!"
            executable = raw_input("Enter executable: ");
        transfer_executable = "True"
        when_to_transfer_output = "ON_EXIT"
        requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")'
        if owner == None:
            owner = 'Undefined'
        notification = 'Never'
        # Create a testing loop for each concurrency
        results = []
        for i in range(0, len(concurrencyLevel), 1):
            # request the glideins
            # we want 10% more glideins than the concurrency level
            totalGlideins = int(int(concurrencyLevel[i]) + .1 * int(concurrencyLevel[i]))
            gktid.request_glideins(totalGlideins)
            # now we create the directories for each job and a submit file
            workingDir = os.getcwd()
            for k in range(0, len(concurrencyLevel), 1):
                loop = 0
                dir1 = workingDir + '/' + 'test' + concurrencyLevel[k] + '/'
                os.makedirs(dir1)
                logfile = dir1 + 'test' + concurrencyLevel[k] + '.log'
                outputfile = 'test' + concurrencyLevel[k] + '.out'
                errorfile = 'test' + concurrencyLevel[k] + '.err'
                filename = dir1 + 'submit' + '.condor'
                FILE=open(filename, "w")
                FILE.write('universe=' + universe + '\n')
                FILE.write('executable=' + executable + '\n')
                FILE.write('transfer_executable=' + transfer_executable + '\n')
                FILE.write('when_to_transfer_output=' + when_to_transfer_output + '\n')
                FILE.write('Requirements=' + requirements + '\n')
                FILE.write('+Owner=' + owner + '\n')
                FILE.write('log=' + logfile + '\n')
                FILE.write('output=' + outputfile + '\n')
                FILE.write('error=' + errorfile + '\n')
                FILE.write('notification=' + notification + '\n' + '\n')
                if arguments != None:
                    FILE.write('Arguments =' + arguments + '\n')
                for j in range(0, int(concurrencyLevel[k]), 1):
                    FILE.write('Initialdir = ' + 'job' + str(loop) + '\n')
                    FILE.write('Queue' + '\n' + '\n')
                    loop = loop + 1
                for i in range(0, int(concurrencyLevel[k]), 1):
                    dir2 = dir1 + 'job' + str(i) + '/'
                    os.makedirs(dir2)
                FILE.close()
                # need to figure out when we have all the glideins
                # need to ask the glidekeeper object
                finished = "false"
                while finished != "true":
                    numberGlideins = gktid.get_running_glideins()
                    if numberGlideins = totalGlideins:
                        finished = "true"
                # now we begin submission and monitoring
                # Need to figure this part out
                submission = condorManager.condorSubmitOne(filename)
                running = "true"
                while running != "false":
                    check1 = condorMonitor.CondorQ()
                    # Not sure if this is the correct constraint to put on the monitor
                    if check1 == None:
                        running = "false"
                # Need to check log files for when first
                # job submitted and last job finished
                hours = []
                minutes = []
                seconds = []
                logCheck = open(logfile, 'r')
                try:
                    lines1 = logCheck.readlines()
                finally:
                    logCheck.close()
                for line in lines1:
                    line = line.strip()
                    if line[0:1] in ('(','.','U','R','J','C','G'):
                        continue # ignore unwanted text lines
                    arr1 = line.split(') ',1)
                    if len(arr1) < 2:
                        continue
                    arr2 = arr1[1].split(' ',2)
                    time = arr2[1].split(':',2)
                    hours.append(int(time[0]))
                    minutes.append(int(time[1]))
                    seconds.append(int(time[2]))
                diffHours = (hours[len(hours)-1] - hours[0]) * 3600
                diffMinutes = (minutes[len(minutes)-1] - minutes[0]) * 60
                diffSeconds = seconds[len(seconds)-1] - seconds[0]
                totalTime = diffHours + diffMinutes + diffSeconds
                final = [totalTime, concurrencyLevel[k]]
                results.append(final)
                # Cleanup all the directories and files made
                shutil.rmtree(dir1)
filename = dir1 + 'submit' + '.condor'
filename = dir1 + 'submit.condor'
if numberGlideins = totalGlideins:
if numberGlideins = requestedGlideins:
final = [totalTime, concurrencyLevel[k]]
final = [totalTime, concurrencyLevel[i]]
self.needed_glidein=needed_glideins
self.needed_glideins=needed_glideins
def request_glideins(self,needed_glideins):
    self.needed_glidein=needed_glideins
factory_glidein_dict=glideinFrontendInterface.findGlideins(factory_pool_node,self.signature_type,self.factory_constraint,self.proxy_data!=None,get_only_matching=True)
factory_glidein_dict=glideinFrontendInterface.findGlideins(factory_pool_node,factory_identity,self.signature_type,self.factory_constraint,self.proxy_data!=None,get_only_matching=True)
def go_request_glideins(self):
    # query job collector
    pool_status=condorMonitor.CondorStatus()
    pool_status.load(None,[])
    running_glideins=len(pool_status.fetchStored())
    del pool_status
    self.running_glideins=running_glideins
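The context above counts running glideins by loading the collector's machine ads with no constraint and no extra attributes. The same pattern, factored into a standalone helper for clarity (hypothetical function name, reusing only the condorMonitor calls shown above):

    def count_running_glideins():
        # query the collector and count the returned ads
        pool_status = condorMonitor.CondorStatus()
        pool_status.load(None, [])
        running = len(pool_status.fetchStored())
        del pool_status
        return running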
glidein_dict[(factory_pool_node,glidename)]=factory_glidein_dict[glidename]
glidein_el=factory_glidein_dict[glidename]
if not glidein_el['attrs'].has_key('PubKeyType'):
    continue
elif glidein_el['attrs']['PubKeyType']=='RSA':
    try:
        glidein_el['attrs']['PubKeyObj']=glideinFrontendInterface.pubCrypto.PubRSAKey(str(string.replace(glidein_el['attrs']['PubKeyValue'],'\\n','\n')))
        glidein_dict[(factory_pool_node,glidename)]=glidein_el
    except:
        continue
else:
    continue
key_obj=key_builder.get_key_obj(self.classad_identity,
key_obj=key_builder.get_key_obj(self.classad_id,
glideinkeeper_id,classad_id,
glidekeeper_id,classad_id,
def __init__(self,
             web_url,descript_fname,descript_signature,
             glideinkeeper_id,classad_id,
             factory_pools,factory_constraint,
             proxy_fname):
    threading.Thread.__init__(self)
    # consts
    self.signature_type = "sha1"
    self.max_request=100
gktid=glideKeeper.glideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
gktid=glideKeeper.GlideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
def run(config):
    import glideKeeper
    gktid=glideKeeper.glideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
                                        config.runId,
                                        config.gfactoryClassadID,
                                        [config.gfactoryNode],config.gFactoryConstraint,
                                        config.proxyFile)
    gktid.start()
    try:
        # most of the code goes here
        # first load the file, so we check it is readable
        fd = open('parameters.cfg', 'r')
        try:
            lines = fd.readlines()
        finally:
            fd.close()
        # reset the values
        executable = None
        arguments = None
        concurrency = None
        owner = None
        # read the values
        for line in lines:
            line = line.strip()
            if line[0:1] in ('#',''):
                continue # ignore comments and empty lines
            arr = line.split('=',1)
            if len(arr) != 2:
                raise RuntimeError, 'Invalid parameter line, missing =: %s'%line
            key = arr[0].strip()
            val = arr[1].strip()
            if key == 'executable':
                if not os.path.exists(val):
                    raise RuntimeError, "%s '%s' is not a valid executable"%(key,val)
                executable = val
            elif key == 'owner':
                owner=val
            elif key == 'arguments':
                arguments = val
            elif key == 'concurrency':
                concurrency=val
        concurrencyLevel = concurrency.split()
        # make sure all the needed values have been read,
        # and assign defaults, if needed
        universe = 'vanilla'
        if executable == None:
            raise RuntimeError, "executable was not defined!"
            executable = raw_input("Enter executable: ");
        transfer_executable = "True"
        when_to_transfer_output = "ON_EXIT"
        requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")'
        if owner == None:
            owner = 'Undefined'
        notification = 'Never'
        # Create a testing loop for each concurrency
        results = []
        for i in range(0, len(concurrencyLevel), 1):
            # request the glideins
            # we want 10% more glideins than the concurrency level
            requestedGlideins = int(concurrencyLevel[i])
            totalGlideins = int(requestedGlideins + .1 * requestedGlideins))
            gktid.request_glideins(totalGlideins)
            # now we create the directories for each job and a submit file
            workingDir = os.getcwd()
            for k in range(0, len(concurrencyLevel), 1):
                loop = 0
                dir1 = workingDir + '/' + 'test' + concurrencyLevel[k] + '/'
                os.makedirs(dir1)
                logfile = dir1 + 'test' + concurrencyLevel[k] + '.log'
                outputfile = 'test' + concurrencyLevel[k] + '.out'
                errorfile = 'test' + concurrencyLevel[k] + '.err'
                filename = dir1 + 'submit.condor'
                FILE=open(filename, "w")
                FILE.write('universe=' + universe + '\n')
                FILE.write('executable=' + executable + '\n')
                FILE.write('transfer_executable=' + transfer_executable + '\n')
                FILE.write('when_to_transfer_output=' + when_to_transfer_output + '\n')
                FILE.write('Requirements=' + requirements + '\n')
                FILE.write('+Owner=' + owner + '\n')
                FILE.write('log=' + logfile + '\n')
                FILE.write('output=' + outputfile + '\n')
                FILE.write('error=' + errorfile + '\n')
                FILE.write('notification=' + notification + '\n' + '\n')
                if arguments != None:
                    FILE.write('Arguments =' + arguments + '\n')
                for j in range(0, int(concurrencyLevel[k]), 1):
                    FILE.write('Initialdir = ' + 'job' + str(loop) + '\n')
                    FILE.write('Queue' + '\n' + '\n')
                    loop = loop + 1
                for i in range(0, int(concurrencyLevel[k]), 1):
                    dir2 = dir1 + 'job' + str(i) + '/'
                    os.makedirs(dir2)
                FILE.close()
                # Need to figure out when we have all the glideins
                # Ask the glidekeeper object
                finished = "false"
                while finished != "true":
                    numberGlideins = gktid.get_running_glideins()
                    if numberGlideins = requestedGlideins:
                        finished = "true"
                # Now we begin submission and monitoring
                ## Need to figure this part out
                submission = condorManager.condorSubmitOne(filename)
                running = "true"
                while running != "false":
                    check1 = condorMonitor.CondorQ()
                    # Not sure if this is the correct constraint to put on the monitor
                    if check1 == None:
                        running = "false"
                # Need to check log files for when first
                # job submitted and last job finished
                hours = []
                minutes = []
                seconds = []
                logCheck = open(logfile, 'r')
                try:
                    lines1 = logCheck.readlines()
                finally:
                    logCheck.close()
                for line in lines1:
                    line = line.strip()
                    if line[0:1] in ('(','.','U','R','J','C','G'):
                        continue # ignore unwanted text lines
                    arr1 = line.split(') ',1)
                    if len(arr1) < 2:
                        continue
                    arr2 = arr1[1].split(' ',2)
                    time = arr2[1].split(':',2)
                    hours.append(int(time[0]))
                    minutes.append(int(time[1]))
                    seconds.append(int(time[2]))
                diffHours = (hours[len(hours)-1] - hours[0]) * 3600
                diffMinutes = (minutes[len(minutes)-1] - minutes[0]) * 60
                diffSeconds = seconds[len(seconds)-1] - seconds[0]
                totalTime = diffHours + diffMinutes + diffSeconds
                final = [totalTime, concurrencyLevel[i]]
                results.append(final)
                # Cleanup all the directories and files made
                shutil.rmtree(dir1)
        # Write results to a data file for plotting
gktid=glideKeeper.GlideKeeperThread(config.webUrl,self.descriptName,config.descriptSignature,
gktid=glideKeeper.GlideKeeperThread(config.webURL,self.descriptName,config.descriptSignature,
for k in range(0, len(concurrencyLevel), 1):
    requestedGlideins = int(concurrencyLevel[k])
    totalGlideins = int(requestedGlideins + .1 * requestedGlideins)
    gktid.request_glideins(totalGlideins)
for l in range(0, runs, 1):
    main_log.write("Iteration %i\n"%l)
    for k in range(0, len(concurrencyLevel), 1):
        main_log.write("Concurrency %i\n"%int(concurrencyLevel[k]))
        requestedGlideins = int(concurrencyLevel[k])
        totalGlideins = int(requestedGlideins + .1 * requestedGlideins)
        gktid.request_glideins(totalGlideins)
        main_log.write("%s %i Glideins requested\n"%(ctime(),totalGlideins))
def run(config):
    os.environ['_CONDOR_SEC_DEFAULT_AUTHENTICATION_METHODS']='GSI'
    os.environ['X509_USER_PROXY']=config.proxyFile
    import glideKeeper
    gktid=glideKeeper.GlideKeeperThread(config.webURL,config.descriptFile,config.descriptSignature,
                                        config.runId,
                                        config.myClassadID,
                                        [(config.gfactoryNode,config.gfactoryClassadID)],config.gfactoryConstraint,
                                        config.collectorNode,
                                        config.proxyFile)
    gktid.start()
    try:
        # first load the file, so we check it is readable
        fd = open('parameters.cfg', 'r')
        try:
            lines = fd.readlines()
        finally:
            fd.close()
        # reset the values
        executable = None
        inputFile = None
        outputFile = None
        environment = None
        arguments = None
        concurrency = None
        # read the values
        for line in lines:
            line = line.strip()
            if line[0:1] in ('#',''):
                continue # ignore comments and empty lines
            arr = line.split('=',1)
            if len(arr) != 2:
                raise RuntimeError, 'Invalid parameter line, missing =: %s'%line
            key = arr[0].strip()
            val = arr[1].strip()
            if key == 'executable':
                if not os.path.exists(val):
                    raise RuntimeError, "%s '%s' is not a valid executable"%(key,val)
                executable = val
            elif key == 'transfer_input_files':
                inputFile = val
            elif key == 'transfer_output_files':
                outputFile = val
            elif key == 'environment':
                environment = val
            elif key == 'arguments':
                arguments = val
            elif key == 'concurrency':
                concurrency=val
        concurrencyLevel = concurrency.split()
        # make sure all the needed values have been read,
        # and assign defaults, if needed
        universe = 'vanilla'
        if executable == None:
            raise RuntimeError, "executable was not defined!"
            executable = raw_input("Enter executable: ");
        transfer_executable = "True"
        when_to_transfer_output = "ON_EXIT"
        requirements = '(GLIDEIN_Site =!= "UCSD12") && (Arch =!= "abc")'
        owner = 'Undefined'
        notification = 'Never'
        # Create a testing loop for each concurrency
        for k in range(0, len(concurrencyLevel), 1):
            # request the glideins
            # we want 10% more glideins than the concurrency level
            requestedGlideins = int(concurrencyLevel[k])
            totalGlideins = int(requestedGlideins + .1 * requestedGlideins)
            gktid.request_glideins(totalGlideins)
            # now we create the directories for each job and a submit file
            workingDir = os.getcwd()
            loop = 0
            dir1 = workingDir + '/test' + concurrencyLevel[k] + '/'
            os.makedirs(dir1)
            logfile = workingDir + '/test' + concurrencyLevel[k] + '.log'
            outputfile = 'test' + concurrencyLevel[k] + '.out'
            errorfile = 'test' + concurrencyLevel[k] + '.err'
            filename = 'submit.condor'
            condorSubmitFile = open(filename, "w")
            condorSubmitFile.write('universe = ' + universe + '\n')
            condorSubmitFile.write('executable = ' + executable + '\n')
            condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n')
            if inputFile != None:
                condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n')
            if outputFile != None:
                condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n')
            if environment != None:
                condorSubmitFile.write('environment = ' + environment + '\n')
            condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n')
            condorSubmitFile.write('Requirements = ' + requirements + '\n')
            condorSubmitFile.write('+Owner = ' + owner + '\n')
            condorSubmitFile.write('log = ' + logfile + '\n')
            condorSubmitFile.write('output = ' + outputfile + '\n')
            condorSubmitFile.write('error = ' + errorfile + '\n')
            condorSubmitFile.write('notification = ' + notification + '\n')
            condorSubmitFile.write('+IsSleep = 1\n')
            condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n')
            if arguments != None:
                condorSubmitFile.write('Arguments = ' + arguments + '\n')
            for j in range(0, int(concurrencyLevel[k]), 1):
                condorSubmitFile.write('Initialdir = ' dir1 + 'job' + str(loop) + '\n')
                condorSubmitFile.write('Queue\n\n')
                loop = loop + 1
            for i in range(0, int(concurrencyLevel[k]), 1):
                dir2 = dir1 + 'job' + str(i) + '/'
                os.makedirs(dir2)
            condorSubmitFile.close()
            # Need to figure out when we have all the glideins
            # Ask the glidekeeper object
            finished = "false"
            while finished != "true":
                numberGlideins = gktid.get_running_glideins()
                print numberGlideins
                time.sleep(5)
                if numberGlideins >= requestedGlideins:
                    finished = "true"
            # Now we begin submission and monitoring
            submission = condorManager.condorSubmitOne(filename)
            running = "true"
            while running != "false":
                check1 = condorMonitor.CondorQ()
                check1.load('Status==3')
                data = check1.fetch()
                if len(data.keys()) == 0:
                    running = "false"
            # Cleanup all the directories and files made
            shutil.rmtree(dir1)
    finally:
        gktid.soft_kill()
        gktid.join()
    return
workingDir = os.getcwd()
loop = 0
dir1 = workingDir + '/test' + concurrencyLevel[k] + '/'
os.makedirs(dir1)
logfile = workingDir + '/test' + concurrencyLevel[k] + '.log'
outputfile = 'test' + concurrencyLevel[k] + '.out'
errorfile = 'test' + concurrencyLevel[k] + '.err'
filename = 'submit.condor'
condorSubmitFile = open(filename, "w")
condorSubmitFile.write('universe = ' + universe + '\n')
condorSubmitFile.write('executable = ' + executable + '\n')
condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n')
if inputFile != None:
    condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n')
if outputFile != None:
    condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n')
if environment != None:
    condorSubmitFile.write('environment = ' + environment + '\n')
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n')
condorSubmitFile.write('Requirements = ' + requirements + '\n')
condorSubmitFile.write('+Owner = ' + owner + '\n')
condorSubmitFile.write('log = ' + logfile + '\n')
condorSubmitFile.write('output = ' + outputfile + '\n')
condorSubmitFile.write('error = ' + errorfile + '\n')
condorSubmitFile.write('notification = ' + notification + '\n')
condorSubmitFile.write('+IsSleep = 1\n')
condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n')
if arguments != None:
    condorSubmitFile.write('Arguments = ' + arguments + '\n')
for j in range(0, int(concurrencyLevel[k]), 1):
    condorSubmitFile.write('Initialdir = ' dir1 + 'job' + str(loop) + '\n')
    condorSubmitFile.write('Queue\n\n')
    loop = loop + 1
for i in range(0, int(concurrencyLevel[k]), 1):
    dir2 = dir1 + 'job' + str(i) + '/'
    os.makedirs(dir2)
condorSubmitFile.close()
finished = "false"
while finished != "true":
    numberGlideins = gktid.get_running_glideins()
    print numberGlideins
    time.sleep(5)
    if numberGlideins >= requestedGlideins:
        finished = "true"
submission = condorManager.condorSubmitOne(filename)
running = "true"
while running != "false":
    check1 = condorMonitor.CondorQ()
    check1.load('Status==3')
    data = check1.fetch()
    if len(data.keys()) == 0:
        running = "false"
shutil.rmtree(dir1)
workingDir = os.getcwd()
loop = 0
dir1 = workingDir + '/' + startTime + '/concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '/'
os.makedirs(dir1)
logfile = workingDir + '/' + startTime + '/con_' + concurrencyLevel[k] + '_run_' + str(l) + '.log'
outputfile = 'concurrency_' + concurrencyLevel[k] + '.out'
errorfile = 'concurrency_' + concurrencyLevel[k] + '.err'
filename = executable + '_concurrency_' + concurrencyLevel[k] + '_run_' + str(l) + '_submit.condor'
condorSubmitFile=open(filename, "w")
condorSubmitFile.write('universe = ' + universe + '\n')
condorSubmitFile.write('executable = ' + executable + '\n')
condorSubmitFile.write('transfer_executable = ' + transfer_executable + '\n')
if inputFile != None:
    condorSubmitFile.write('transfer_input_files = ' + inputFile + '\n')
if outputFile != None:
    condorSubmitFile.write('transfer_output_files = ' + outputFile + '\n')
if environment != None:
    condorSubmitFile.write('environment = ' + environment + '\n')
condorSubmitFile.write('when_to_transfer_output = ' + when_to_transfer_output + '\n')
condorSubmitFile.write('Requirements = ' + requirements + '\n')
condorSubmitFile.write('+Owner = ' + owner + '\n')
condorSubmitFile.write('log = ' + logfile + '\n')
condorSubmitFile.write('output = ' + outputfile + '\n')
condorSubmitFile.write('error = ' + errorfile + '\n')
condorSubmitFile.write('notification = ' + notification + '\n')
condorSubmitFile.write('+IsSleep = 1\n')
condorSubmitFile.write('x509userproxy = ' + config.proxyFile + '\n\n')
if arguments != None:
    condorSubmitFile.write('Arguments = ' + arguments + '\n')
for j in range(0, int(concurrencyLevel[k]), 1):
    condorSubmitFile.write('Initialdir = ' + dir1 + 'job' + str(loop) + '\n')
    condorSubmitFile.write('Queue\n\n')
    loop = loop + 1
for i in range(0, int(concurrencyLevel[k]), 1):
    dir2 = dir1 + 'job' + str(i) + '/'
    os.makedirs(dir2)
condorSubmitFile.close()
finished = "false"
while finished != "true":
    numberGlideins = gktid.get_running_glideins()
    main_log.write("%s %s %s %s %s\n"%(ctime(), 'we have', numberGlideins, 'glideins, need', requestedGlideins))
    main_log.flush()
    sleep(5)
    if numberGlideins >= requestedGlideins:
        finished = "true"
submission = condorManager.condorSubmitOne(filename)
main_log.write("%s %s\n"%(ctime(), "file submitted"))
shutil.move(filename, workingDir + '/' + startTime + '/' + filename)
running = "true"
while running != "false":
    check1 = condorMonitor.CondorQ()
    try:
        check1.load('JobStatus<3', [])
        data=check1.fetchStored()
    except:
        main_log.write("%s %s\n"%(ctime(), "condor_q failed... ignoring for now"))
        main_log.flush()
        sleep(2)
        continue
    main_log.write("%s %s %s\n"%(ctime(), len(data.keys()), 'jobs running'))
    main_log.flush()
    if len(data.keys()) == 0:
        running = "false"
        main_log.write("%s %s\n"%(ctime(), "no more running jobs"))
    else:
        main_log.write("%s %s\n"%(ctime(), "Done"))
self.gFactoryNode=val
self.gfactoryNode=val
def load_config(self):
    # first load file, so we check it is readable
    fd=open(self.config,'r')
    try:
        lines=fd.readlines()
    finally:
        fd.close()
self.gFactoryConstraint=val
self.gfactoryConstraint=val
[config.gfactoryNode],config.gFactoryConstraint,
[config.gfactoryNode],config.gfactoryConstraint,
threading.Thread(self)
threading.Thread.__init__(self)
def __init__(self,
             web_url,descript_fname,descript_signature,
             glideinkeeper_id,classad_id,
             factory_pools,factory_constraint,
             proxy_fname):
    threading.Thread(self)
    # consts
    self.signature_type = "sha1"
    self.max_request=100
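The pair of records above replaces threading.Thread(self) with threading.Thread.__init__(self). The removed form constructs a brand-new, unrelated Thread (passing self as its group argument) instead of initializing this instance, so the subclass never gets a working start(). A minimal sketch of the corrected idiom (hypothetical subclass for illustration):

    import threading

    class WorkerThread(threading.Thread):
        def __init__(self):
            # initialize the Thread machinery on *this* instance
            threading.Thread.__init__(self)
        def run(self):
            pass

    t = WorkerThread()
    t.start()
    t.join()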
self.assertTrue(e.time > (datetime.datetime.now() + datetime.timedelta(minutes=10)).time(), "The time on the entry is not older than 10 minutes!")
timestamp = datetime.datetime.combine(e.date, e.time)
self.assertTrue(timestamp < datetime.datetime.now() - datetime.timedelta(minutes=10),
                "The time on the entry is not older than 10 minutes!")
def testTimeOutOfRange(self):
    try:
        e = get_object_or_404(Entry, pk=1)
        self.assertTrue(e.time > (datetime.datetime.now() + datetime.timedelta(minutes=10)).time(),
                        "The time on the entry is not older than 10 minutes!")
    except Http404, e:
        self.fail("Entry 1 doesn't exist.")
    res = self.client.get("/undo/1")
    self.assertEquals(res.status_code, 403)
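Comparing e.time alone against a time-of-day derived from now() breaks whenever the window crosses midnight, and it ignores the entry's date entirely. Combining date and time into one datetime, as the fix does, keeps the arithmetic well defined; a small self-contained check (entry values made up for illustration):

    import datetime

    entry_date = datetime.date(2010, 5, 1)   # hypothetical entry fields
    entry_time = datetime.time(23, 55)
    timestamp = datetime.datetime.combine(entry_date, entry_time)
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=10)
    print(timestamp < cutoff)  # True once the entry is over 10 minutes old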
return HttpResponseRedirect(reverse("program_log.views.showdaily",))
return HttpResponseRedirect(reverse("log-show-daily",))
def addentry(request,slot):
    s = ProgramSlot.objects.get(pk=slot)
    if request.POST:
        n = request.POST['notes']
        if n == 'Description':
            n = ''
        e = Entry.objects.create(slot=s,notes=n)
    return HttpResponseRedirect(reverse("program_log.views.showdaily",))
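The fix above redirects by URL name instead of by dotted view path, so the view can move without breaking the redirect. A hedged sketch of the urlconf entry such a name implies (the regex and module layout here are illustrative, not taken from the source):

    # urls.py (illustrative, old Django 1.x style to match the surrounding code)
    from django.conf.urls.defaults import patterns, url

    urlpatterns = patterns('program_log.views',
        url(r'^daily/$', 'showdaily', name='log-show-daily'),
    )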
return (find_name_email_pairs(row) for row in soup.findAll('tr') if row['class'].startswith('blockTableInnerRow'))
return (find_name_email_pairs(row) for row in soup.findAll('tr'))
def safe_lookup(td):
    try:
        return td.a.string.strip().lower()
    except:
        return td.string.strip().title()
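The removed generator above indexes row['class'] directly, which raises KeyError on any <tr> lacking a class attribute; the fix drops the filter altogether. A hedged middle ground that keeps the filter without the crash, using the dict-style .get that BeautifulSoup tags support (hypothetical helper name):

    def inner_rows(soup):
        # skip class-less rows instead of crashing on them
        return (row for row in soup.findAll('tr')
                if row.get('class', '').startswith('blockTableInnerRow'))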
prog.refresh_feed()
try:
    prog.refresh_feed()
except:
    pass
def feed(request, program, feed):
    prog = get_object_or_404(ProgrammingFeed, pk=feed)
    prog.refresh_feed()
    ret = {"feed": prog, "program":program, "entries":prog.programmingaudio_set.all()}
    return render_to_response("programming/sciam.html", ret,
                              context_instance=RequestContext(request))
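Wrapping refresh_feed in a bare except keeps the page rendering when the upstream feed is down, at the cost of hiding real bugs. A hedged alternative that records the failure before continuing (the logging call is an assumption, not in the source):

    import logging

    try:
        prog.refresh_feed()
    except Exception:
        # still serve the cached entries, but leave a trace for debugging
        logging.exception("refresh_feed failed for feed %s", prog.pk)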
slots = ProgramSlot.objects.filter(time__start__gte=now).select_related('program', 'time')
slots = ProgramSlot.objects.filter(active=True, time__start__gte=now).select_related('program', 'time')
def next_n_hours(n):
    now = datetime.now().time()
    now = time(now.hour)
    end_hour = now.hour + n
    end = now.replace(hour=end_hour%24)
other = ProgramSlot.objects.filter(time__end__lte=end).order_by('time__start')
other = ProgramSlot.objects.filter(active=True, time__end__lte=end).order_by('time__start')
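In next_n_hours above, now.replace(hour=end_hour % 24) keeps the hour in range, but a wrapped end is then earlier than now, so a plain time__end__lte=end filter goes empty near midnight. The wrap itself is easy to see in isolation:

    from datetime import time

    now = time(22)  # hypothetical current hour
    n = 5
    end = now.replace(hour=(now.hour + n) % 24)
    print(end)  # 03:00:00 -- earlier than 'now', so range queries
                # must treat the midnight wrap specially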
def db_restart():
def local_db_restart():
def db_restart():
    "Delete and rebuild database on the local"
    with settings(warn_only=True):
        local("rm kelpdb")
    local("python2.6 manage.py syncdb")
    local("python2.6 manage.py loaddata fixtures/*")
local("python2.6 manage.py syncdb") local("python2.6 manage.py loaddata fixtures/*")
local("python2.6 manage.py syncdb --noinput") local("python2.6 manage.py loaddata fixtures/*")
sudo("su -c 'rm ../kelpdb' www-data") sudo("su -c './manage.py syncdb' www-data") sudo("su -c './manage.py loaddata fixtures/*' www-data)")
with settings(warn_only=True):
    sudo("su -c 'rm ../kelpdb' www-data")
sudo("su -c 'python manage.py syncdb --noinput' www-data")
sudo("su -c 'python manage.py loaddata fixtures/*' www-data)")
def restart_database():
    "Delete and rebuild the database on the remote"
    with cd("/home/kelp/kelp"):
        sudo("su -c 'rm ../kelpdb' www-data")
        sudo("su -c './manage.py syncdb' www-data")
        sudo("su -c './manage.py loaddata fixtures/*' www-data)")
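Taken together, the fixes in the records above make the rebuild non-interactive (--noinput skips the superuser prompt) and tolerant of a missing database file (warn_only turns the failed rm into a warning). A sketch of the local task with both changes applied, essentially the post-fix code with its Fabric imports spelled out:

    from fabric.api import local, settings

    def db_restart():
        "Delete and rebuild database on the local"
        with settings(warn_only=True):
            local("rm kelpdb")  # may not exist yet; warn instead of aborting
        local("python2.6 manage.py syncdb --noinput")
        local("python2.6 manage.py loaddata fixtures/*")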
if fs is None:
    fs = fs2.copy()
else:
    if len(fs2) > 0 or overlap_mode == "intersection-strict":
if len(fs2) > 0 or overlap_mode == "intersection-strict":
    if fs is None:
        fs = fs2.copy()
    else:
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet ):
    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}
    for f in HTSeq.GFF_Reader( gff_filename ):
        if f.iv.chrom not in features.step_vectors.keys():
            features.add_chrom( f.iv.chrom )
        if f.type == feature_type:
            try:
                features.add_value( f.attr[ id_attribute ], f.iv )
            except KeyError:
                sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
            counts[ f.attr[ id_attribute ] ] = 0
    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    first_read = iter( HTSeq.SAM_Reader( sam_filename ) ).next()
    pe_mode = first_read.paired_end
    read_seq = HTSeq.SAM_Reader( sam_filename )
    if pe_mode:
        read_seq = HTSeq.pair_SAM_alignments( read_seq )

    empty = 0
    ambiguous = 0
    i = 0
    for r in read_seq:
        if not pe_mode:
            if not r.aligned:
                continue
            iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
        else:
            if r[0] is not None and r[0].aligned:
                iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
            else:
                iv_seq = tuple()
            if r[1] is not None and r[1].aligned:
                iv_seq = itertools.chain( iv_seq,
                    ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
            else:
                if ( r[0] is None ) or not ( r[0].aligned ):
                    continue
        try:
            if overlap_mode == "union":
                fs = set()
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        fs = fs.union( fs2 )
            elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                fs = None
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        if fs is None:
                            fs = fs2.copy()
                        else:
                            if len(fs2) > 0 or overlap_mode == "intersection-strict":
                                fs = fs.intersection( fs2 )
            else:
                sys.exit( "Illegal overlap mode." )
            if len( fs ) == 0:
                empty += 1
            elif len( fs ) > 1:
                ambiguous += 1
            else:
                counts[ list(fs)[0] ] += 1
        except UnknownChrom:
            sys.stderr.write( ( "Warning: Skipping read '%s', aligned to %s, because " +
                "chromosome '%s' did not appear in the GFF file.\n" ) %
                ( r.read.name, r.iv, r.iv.chrom ) )
        i += 1
        if i % 100000 == 0 and not quiet:
            sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
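To see what the two intersection modes compute, here is a toy rerun of the inner loop with plain sets standing in for HTSeq's step values (geneA and geneB are made-up feature IDs). It also shows why the reordered guard in the new code matters: under intersection-nonempty, an empty first step must not seed fs with the empty set.

steps = [set(["geneA"]), set(), set(["geneA", "geneB"])]

fs_strict = None
fs_nonempty = None
for fs2 in steps:
    # intersection-strict: every step participates, so a position covered
    # by no feature empties the whole result
    if fs_strict is None:
        fs_strict = fs2.copy()
    else:
        fs_strict = fs_strict.intersection(fs2)
    # intersection-nonempty: empty steps are skipped entirely
    if len(fs2) > 0:
        if fs_nonempty is None:
            fs_nonempty = fs2.copy()
        else:
            fs_nonempty = fs_nonempty.intersection(fs2)

print fs_strict      # set([])        -- the read would count as "no_feature"
print fs_nonempty    # set(['geneA']) -- the read would count for geneA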
if len( fs ) == 0:
if fs is None or len( fs ) == 0:
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet ):
    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}
    for f in HTSeq.GFF_Reader( gff_filename ):
        if f.iv.chrom not in features.step_vectors.keys():
            features.add_chrom( f.iv.chrom )
        if f.type == feature_type:
            try:
                features.add_value( f.attr[ id_attribute ], f.iv )
            except KeyError:
                sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
            counts[ f.attr[ id_attribute ] ] = 0
    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    first_read = iter( HTSeq.SAM_Reader( sam_filename ) ).next()
    pe_mode = first_read.paired_end
    read_seq = HTSeq.SAM_Reader( sam_filename )
    if pe_mode:
        read_seq = HTSeq.pair_SAM_alignments( read_seq )

    empty = 0
    ambiguous = 0
    i = 0
    for r in read_seq:
        if not pe_mode:
            if not r.aligned:
                continue
            iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
        else:
            if r[0] is not None and r[0].aligned:
                iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
            else:
                iv_seq = tuple()
            if r[1] is not None and r[1].aligned:
                iv_seq = itertools.chain( iv_seq,
                    ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
            else:
                if ( r[0] is None ) or not ( r[0].aligned ):
                    continue
        try:
            if overlap_mode == "union":
                fs = set()
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        fs = fs.union( fs2 )
            elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                fs = None
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        if fs is None:
                            fs = fs2.copy()
                        else:
                            if len(fs2) > 0 or overlap_mode == "intersection-strict":
                                fs = fs.intersection( fs2 )
            else:
                sys.exit( "Illegal overlap mode." )
            if len( fs ) == 0:
                empty += 1
            elif len( fs ) > 1:
                ambiguous += 1
            else:
                counts[ list(fs)[0] ] += 1
        except UnknownChrom:
            sys.stderr.write( ( "Warning: Skipping read '%s', aligned to %s, because " +
                "chromosome '%s' did not appear in the GFF file.\n" ) %
                ( r.read.name, r.iv, r.iv.chrom ) )
        i += 1
        if i % 100000 == 0 and not quiet:
            sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
self.step_vectors[ chrom ][ strand ][ 0 : chrom_lengths[chrom] ] = set()
self.step_vectors[ chrom ][ "+" ][ : ] = set() self.step_vectors[ chrom ][ "-" ][ : ] = set()
def __init__( self, chrom_lengths, stranded=True ):
    GenomicArray.__init__( self, chrom_lengths, stranded, 'O' )
    for chrom in self.step_vectors:
        if self.stranded:
            self.step_vectors[ chrom ][ strand ][ 0 : chrom_lengths[chrom] ] = set()
        else:
            self.step_vectors[ chrom ][ 0 : chrom_lengths[chrom] ] = set()
self.step_vectors[ chrom ][ 0 : chrom_lengths[chrom] ] = set()
self.step_vectors[ chrom ][ : ] = set()
def __init__( self, chrom_lengths, stranded=True ):
    GenomicArray.__init__( self, chrom_lengths, stranded, 'O' )
    for chrom in self.step_vectors:
        if self.stranded:
            self.step_vectors[ chrom ][ strand ][ 0 : chrom_lengths[chrom] ] = set()
        else:
            self.step_vectors[ chrom ][ 0 : chrom_lengths[chrom] ] = set()
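The [ : ] form leans on slice defaults to cover the vector's whole extent, so the constructor no longer needs chrom_lengths (or the undefined strand variable the old stranded branch referenced). Plain Python slicing shows the idea, though this is only an analogy: HTSeq's StepVector assigns one value across the range rather than element-wise.

v = [0] * 5
v[:] = [set() for _ in v]   # whole extent, no explicit length constant
print v                     # [set([]), set([]), set([]), set([]), set([])]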
rr = r if not pe_mode else r[0]
sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
    "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
    ( rr.read.name, iv.chrom ) )
if not pe_mode:
    rr = r
else:
    rr = r[0] if r[0] is not None else r[1]
if not quiet:
    sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
        "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
        ( rr.read.name, iv.chrom ) )
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet ):
    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}

    # Try to open samfile to fail early in case it is not there
    open( sam_filename ).close()

    for f in HTSeq.GFF_Reader( gff_filename ):
        if f.iv.chrom not in features.step_vectors.keys():
            features.add_chrom( f.iv.chrom )
        if f.type == feature_type:
            try:
                feature_id = f.attr[ id_attribute ]
            except KeyError:
                sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
            if stranded and f.iv.strand == ".":
                sys.exit( "Feature %s at %s does not have strand information but you are "
                    "running htseq-count in stranded mode. Use '--stranded=no'." % ( f.name, f.iv ) )
            features.add_value( feature_id, f.iv )
            counts[ f.attr[ id_attribute ] ] = 0
    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    first_read = iter( HTSeq.SAM_Reader( sam_filename ) ).next()
    pe_mode = first_read.paired_end
    read_seq = HTSeq.SAM_Reader( sam_filename )
    if pe_mode:
        read_seq = HTSeq.pair_SAM_alignments( read_seq )

    empty = 0
    ambiguous = 0
    i = 0
    for r in read_seq:
        if not pe_mode:
            if not r.aligned:
                continue
            iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
        else:
            if r[0] is not None and r[0].aligned:
                iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
            else:
                iv_seq = tuple()
            if r[1] is not None and r[1].aligned:
                iv_seq = itertools.chain( iv_seq,
                    ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
            else:
                if ( r[0] is None ) or not ( r[0].aligned ):
                    continue
        try:
            if overlap_mode == "union":
                fs = set()
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        fs = fs.union( fs2 )
            elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                fs = None
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        if len(fs2) > 0 or overlap_mode == "intersection-strict":
                            if fs is None:
                                fs = fs2.copy()
                            else:
                                fs = fs.intersection( fs2 )
            else:
                sys.exit( "Illegal overlap mode." )
            if fs is None or len( fs ) == 0:
                empty += 1
            elif len( fs ) > 1:
                ambiguous += 1
            else:
                counts[ list(fs)[0] ] += 1
        except UnknownChrom:
            rr = r if not pe_mode else r[0]
            sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
                "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
                ( rr.read.name, iv.chrom ) )
        i += 1
        if i % 100000 == 0 and not quiet:
            sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
"Public License v3. Part of the 'HTSeq' framework." )
"Public License v3. Part of the 'HTSeq' framework, version %s." % HTSeq.__version__ )
def main():
    optParser = optparse.OptionParser(
        usage = "%prog [options] sam_file gff_file",
        description =
            "This script takes an alignment file in SAM format and a " +
            "feature file in GFF format and calculates for each feature " +
            "the number of reads mapping to it. See " +
            "http://www-huber.embl.de/users/anders/HTSeq/doc/count.html for details.",
        epilog =
            "Written by Simon Anders ([email protected]), European Molecular Biology " +
            "Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General " +
            "Public License v3. Part of the 'HTSeq' framework." )

    optParser.add_option( "-m", "--mode", type="choice", dest="mode",
        choices = ( "union", "intersection-strict", "intersection-nonempty" ),
        default = "union",
        help = "mode to handle reads overlapping more than one feature" +
            "(choices: union, intersection-strict, intersection-nonempty; default: union)" )
    optParser.add_option( "-t", "--type", type="string", dest="featuretype",
        default = "exon",
        help = "feature type (3rd column in GFF file) to be used, " +
            "all features of other type are ignored (default, suitable for Ensembl " +
            "GTF files: exon)" )
    optParser.add_option( "-i", "--idattr", type="string", dest="idattr",
        default = "gene_id",
        help = "GFF attribute to be used as feature ID (default, " +
            "suitable for Ensembl GTF files: gene_id)" )
    optParser.add_option( "-s", "--stranded", type="choice", dest="stranded",
        choices = ( "yes", "no" ), default = "yes",
        help = "whether the data is from a strand-specific assay (default: yes)" )
    optParser.add_option( "-q", "--quiet", action="store_true", dest="quiet",
        help = "suppress progress report" )

    if len( sys.argv ) == 1:
        optParser.print_help()
        sys.exit(1)

    (opts, args) = optParser.parse_args()

    if len( args ) != 2:
        sys.stderr.write( sys.argv[0] + ": Error: Please provide two arguments.\n" )
        sys.stderr.write( " Call with '-h' to get usage information.\n" )
        sys.exit( 1 )

    warnings.showwarning = my_showwarning

    try:
        count_reads_in_features( args[0], args[1], opts.stranded == "yes",
            opts.mode, opts.featuretype, opts.idattr, opts.quiet )
    except Exception:
        sys.stderr.write( "Error: %s\n" % str( sys.exc_info()[1] ) )
        sys.stderr.write( "[Exception type: %s, raised in %s:%d]\n" %
            ( sys.exc_info()[1].__class__.__name__,
              os.path.basename(traceback.extract_tb( sys.exc_info()[2] )[-1][0]),
              traceback.extract_tb( sys.exc_info()[2] )[-1][1] ) )
        sys.exit( 1 )
help = "suppress progress report" )
help = "suppress progress report and warnings" )
def main():
    optParser = optparse.OptionParser(
        usage = "%prog [options] sam_file gff_file",
        description =
            "This script takes an alignment file in SAM format and a " +
            "feature file in GFF format and calculates for each feature " +
            "the number of reads mapping to it. See " +
            "http://www-huber.embl.de/users/anders/HTSeq/doc/count.html for details.",
        epilog =
            "Written by Simon Anders ([email protected]), European Molecular Biology " +
            "Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General " +
            "Public License v3. Part of the 'HTSeq' framework." )

    optParser.add_option( "-m", "--mode", type="choice", dest="mode",
        choices = ( "union", "intersection-strict", "intersection-nonempty" ),
        default = "union",
        help = "mode to handle reads overlapping more than one feature" +
            "(choices: union, intersection-strict, intersection-nonempty; default: union)" )
    optParser.add_option( "-t", "--type", type="string", dest="featuretype",
        default = "exon",
        help = "feature type (3rd column in GFF file) to be used, " +
            "all features of other type are ignored (default, suitable for Ensembl " +
            "GTF files: exon)" )
    optParser.add_option( "-i", "--idattr", type="string", dest="idattr",
        default = "gene_id",
        help = "GFF attribute to be used as feature ID (default, " +
            "suitable for Ensembl GTF files: gene_id)" )
    optParser.add_option( "-s", "--stranded", type="choice", dest="stranded",
        choices = ( "yes", "no" ), default = "yes",
        help = "whether the data is from a strand-specific assay (default: yes)" )
    optParser.add_option( "-q", "--quiet", action="store_true", dest="quiet",
        help = "suppress progress report" )

    if len( sys.argv ) == 1:
        optParser.print_help()
        sys.exit(1)

    (opts, args) = optParser.parse_args()

    if len( args ) != 2:
        sys.stderr.write( sys.argv[0] + ": Error: Please provide two arguments.\n" )
        sys.stderr.write( " Call with '-h' to get usage information.\n" )
        sys.exit( 1 )

    warnings.showwarning = my_showwarning

    try:
        count_reads_in_features( args[0], args[1], opts.stranded == "yes",
            opts.mode, opts.featuretype, opts.idattr, opts.quiet )
    except Exception:
        sys.stderr.write( "Error: %s\n" % str( sys.exc_info()[1] ) )
        sys.stderr.write( "[Exception type: %s, raised in %s:%d]\n" %
            ( sys.exc_info()[1].__class__.__name__,
              os.path.basename(traceback.extract_tb( sys.exc_info()[2] )[-1][0]),
              traceback.extract_tb( sys.exc_info()[2] )[-1][1] ) )
        sys.exit( 1 )
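An invocation sketch matching the options defined above (the file names are placeholders):

# python htseq-count.py --mode=intersection-nonempty --stranded=no \
#     --type=exon --idattr=gene_id aligned.sam annotation.gtf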
except Exception:
except:
def main():
    optParser = optparse.OptionParser(
        usage = "%prog [options] sam_file gff_file",
        description =
            "This script takes an alignment file in SAM format and a " +
            "feature file in GFF format and calculates for each feature " +
            "the number of reads mapping to it. See " +
            "http://www-huber.embl.de/users/anders/HTSeq/doc/count.html for details.",
        epilog =
            "Written by Simon Anders ([email protected]), European Molecular Biology " +
            "Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General " +
            "Public License v3. Part of the 'HTSeq' framework." )

    optParser.add_option( "-m", "--mode", type="choice", dest="mode",
        choices = ( "union", "intersection-strict", "intersection-nonempty" ),
        default = "union",
        help = "mode to handle reads overlapping more than one feature" +
            "(choices: union, intersection-strict, intersection-nonempty; default: union)" )
    optParser.add_option( "-t", "--type", type="string", dest="featuretype",
        default = "exon",
        help = "feature type (3rd column in GFF file) to be used, " +
            "all features of other type are ignored (default, suitable for Ensembl " +
            "GTF files: exon)" )
    optParser.add_option( "-i", "--idattr", type="string", dest="idattr",
        default = "gene_id",
        help = "GFF attribute to be used as feature ID (default, " +
            "suitable for Ensembl GTF files: gene_id)" )
    optParser.add_option( "-s", "--stranded", type="choice", dest="stranded",
        choices = ( "yes", "no" ), default = "yes",
        help = "whether the data is from a strand-specific assay (default: yes)" )
    optParser.add_option( "-q", "--quiet", action="store_true", dest="quiet",
        help = "suppress progress report" )

    if len( sys.argv ) == 1:
        optParser.print_help()
        sys.exit(1)

    (opts, args) = optParser.parse_args()

    if len( args ) != 2:
        sys.stderr.write( sys.argv[0] + ": Error: Please provide two arguments.\n" )
        sys.stderr.write( " Call with '-h' to get usage information.\n" )
        sys.exit( 1 )

    warnings.showwarning = my_showwarning

    try:
        count_reads_in_features( args[0], args[1], opts.stranded == "yes",
            opts.mode, opts.featuretype, opts.idattr, opts.quiet )
    except Exception:
        sys.stderr.write( "Error: %s\n" % str( sys.exc_info()[1] ) )
        sys.stderr.write( "[Exception type: %s, raised in %s:%d]\n" %
            ( sys.exc_info()[1].__class__.__name__,
              os.path.basename(traceback.extract_tb( sys.exc_info()[2] )[-1][0]),
              traceback.extract_tb( sys.exc_info()[2] )[-1][1] ) )
        sys.exit( 1 )
except: sys.stderr.write( "Error occured when reading first line of sam file." ) raise try:
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet, minaqual ):
    if quiet:
        warnings.filterwarnings( action="ignore", module="HTSeq" )

    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}

    # Try to open samfile to fail early in case it is not there
    open( sam_filename ).close()

    gff = HTSeq.GFF_Reader( gff_filename )
    i = 0
    try:
        for f in gff:
            if f.iv.chrom not in features.step_vectors.keys():
                features.add_chrom( f.iv.chrom )
            if f.type == feature_type:
                try:
                    feature_id = f.attr[ id_attribute ]
                except KeyError:
                    sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
                if stranded and f.iv.strand == ".":
                    sys.exit( "Feature %s at %s does not have strand information but you are "
                        "running htseq-count in stranded mode. Use '--stranded=no'." % ( f.name, f.iv ) )
                features.add_value( feature_id, f.iv )
                counts[ f.attr[ id_attribute ] ] = 0
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d GFF lines processed.\n" % i )
    except:
        sys.stderr.write( "Error occured in %s.\n" % gff.get_line_number_string() )
        raise
    if not quiet:
        sys.stderr.write( "%d GFF lines processed.\n" % i )

    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    try:
        read_seq = HTSeq.SAM_Reader( sam_filename )
        first_read = iter( read_seq ).next()
        pe_mode = first_read.paired_end
        read_seq = HTSeq.SAM_Reader( sam_filename )
        if pe_mode:
            read_seq = HTSeq.pair_SAM_alignments( read_seq )
        empty = 0
        ambiguous = 0
        notaligned = 0
        lowqual = 0
        i = 0
        for r in read_seq:
            if not pe_mode:
                if not r.aligned:
                    notaligned += 1
                    continue
                if r.aQual < minaqual:
                    lowqual += 1
                    continue
                iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
            else:
                if r[0] is not None and r[0].aligned:
                    iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
                else:
                    iv_seq = tuple()
                if r[1] is not None and r[1].aligned:
                    iv_seq = itertools.chain( iv_seq,
                        ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
                else:
                    if ( r[0] is None ) or not ( r[0].aligned ):
                        notaligned += 1
                        continue
                if ( r[0] and r[0].aQual < minaqual ) or ( r[1] and r[1].aQual < minaqual ):
                    lowqual += 1
                    continue
            try:
                if overlap_mode == "union":
                    fs = set()
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            fs = fs.union( fs2 )
                elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                    fs = None
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            if len(fs2) > 0 or overlap_mode == "intersection-strict":
                                if fs is None:
                                    fs = fs2.copy()
                                else:
                                    fs = fs.intersection( fs2 )
                else:
                    sys.exit( "Illegal overlap mode." )
                if fs is None or len( fs ) == 0:
                    empty += 1
                elif len( fs ) > 1:
                    ambiguous += 1
                else:
                    counts[ list(fs)[0] ] += 1
            except UnknownChrom:
                if not pe_mode:
                    rr = r
                else:
                    rr = r[0] if r[0] is not None else r[1]
                if not quiet:
                    sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
                        "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
                        ( rr.read.name, iv.chrom ) )
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d reads processed.\n" % i )
    except:
        try:
            sys.stderr.write( "Error occured in %s.\n" % read_seq.get_line_number_string() )
            # For paired read this does not work. (TODO: Fix it)
        except AttributeError:
            pass
        raise
    if not quiet:
        sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
    print "too low aQual\t%d" % lowqual
    print "not aligned\t%d" % notaligned
try:
if not pe_mode:
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet, minaqual ):
    if quiet:
        warnings.filterwarnings( action="ignore", module="HTSeq" )

    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}

    # Try to open samfile to fail early in case it is not there
    open( sam_filename ).close()

    gff = HTSeq.GFF_Reader( gff_filename )
    i = 0
    try:
        for f in gff:
            if f.iv.chrom not in features.step_vectors.keys():
                features.add_chrom( f.iv.chrom )
            if f.type == feature_type:
                try:
                    feature_id = f.attr[ id_attribute ]
                except KeyError:
                    sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
                if stranded and f.iv.strand == ".":
                    sys.exit( "Feature %s at %s does not have strand information but you are "
                        "running htseq-count in stranded mode. Use '--stranded=no'." % ( f.name, f.iv ) )
                features.add_value( feature_id, f.iv )
                counts[ f.attr[ id_attribute ] ] = 0
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d GFF lines processed.\n" % i )
    except:
        sys.stderr.write( "Error occured in %s.\n" % gff.get_line_number_string() )
        raise
    if not quiet:
        sys.stderr.write( "%d GFF lines processed.\n" % i )

    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    try:
        read_seq = HTSeq.SAM_Reader( sam_filename )
        first_read = iter( read_seq ).next()
        pe_mode = first_read.paired_end
        read_seq = HTSeq.SAM_Reader( sam_filename )
        if pe_mode:
            read_seq = HTSeq.pair_SAM_alignments( read_seq )
        empty = 0
        ambiguous = 0
        notaligned = 0
        lowqual = 0
        i = 0
        for r in read_seq:
            if not pe_mode:
                if not r.aligned:
                    notaligned += 1
                    continue
                if r.aQual < minaqual:
                    lowqual += 1
                    continue
                iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
            else:
                if r[0] is not None and r[0].aligned:
                    iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
                else:
                    iv_seq = tuple()
                if r[1] is not None and r[1].aligned:
                    iv_seq = itertools.chain( iv_seq,
                        ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
                else:
                    if ( r[0] is None ) or not ( r[0].aligned ):
                        notaligned += 1
                        continue
                if ( r[0] and r[0].aQual < minaqual ) or ( r[1] and r[1].aQual < minaqual ):
                    lowqual += 1
                    continue
            try:
                if overlap_mode == "union":
                    fs = set()
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            fs = fs.union( fs2 )
                elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                    fs = None
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            if len(fs2) > 0 or overlap_mode == "intersection-strict":
                                if fs is None:
                                    fs = fs2.copy()
                                else:
                                    fs = fs.intersection( fs2 )
                else:
                    sys.exit( "Illegal overlap mode." )
                if fs is None or len( fs ) == 0:
                    empty += 1
                elif len( fs ) > 1:
                    ambiguous += 1
                else:
                    counts[ list(fs)[0] ] += 1
            except UnknownChrom:
                if not pe_mode:
                    rr = r
                else:
                    rr = r[0] if r[0] is not None else r[1]
                if not quiet:
                    sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
                        "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
                        ( rr.read.name, iv.chrom ) )
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d reads processed.\n" % i )
    except:
        try:
            sys.stderr.write( "Error occured in %s.\n" % read_seq.get_line_number_string() )
            # For paired read this does not work. (TODO: Fix it)
        except AttributeError:
            pass
        raise
    if not quiet:
        sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
    print "too low aQual\t%d" % lowqual
    print "not aligned\t%d" % notaligned
except AttributeError:
    pass
else: sys.stderr.write( "Error occured in %s.\n" % read_seq_pe_file.get_line_number_string() )
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet, minaqual ):
    if quiet:
        warnings.filterwarnings( action="ignore", module="HTSeq" )

    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}

    # Try to open samfile to fail early in case it is not there
    open( sam_filename ).close()

    gff = HTSeq.GFF_Reader( gff_filename )
    i = 0
    try:
        for f in gff:
            if f.iv.chrom not in features.step_vectors.keys():
                features.add_chrom( f.iv.chrom )
            if f.type == feature_type:
                try:
                    feature_id = f.attr[ id_attribute ]
                except KeyError:
                    sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
                if stranded and f.iv.strand == ".":
                    sys.exit( "Feature %s at %s does not have strand information but you are "
                        "running htseq-count in stranded mode. Use '--stranded=no'." % ( f.name, f.iv ) )
                features.add_value( feature_id, f.iv )
                counts[ f.attr[ id_attribute ] ] = 0
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d GFF lines processed.\n" % i )
    except:
        sys.stderr.write( "Error occured in %s.\n" % gff.get_line_number_string() )
        raise
    if not quiet:
        sys.stderr.write( "%d GFF lines processed.\n" % i )

    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    try:
        read_seq = HTSeq.SAM_Reader( sam_filename )
        first_read = iter( read_seq ).next()
        pe_mode = first_read.paired_end
        read_seq = HTSeq.SAM_Reader( sam_filename )
        if pe_mode:
            read_seq = HTSeq.pair_SAM_alignments( read_seq )
        empty = 0
        ambiguous = 0
        notaligned = 0
        lowqual = 0
        i = 0
        for r in read_seq:
            if not pe_mode:
                if not r.aligned:
                    notaligned += 1
                    continue
                if r.aQual < minaqual:
                    lowqual += 1
                    continue
                iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
            else:
                if r[0] is not None and r[0].aligned:
                    iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
                else:
                    iv_seq = tuple()
                if r[1] is not None and r[1].aligned:
                    iv_seq = itertools.chain( iv_seq,
                        ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
                else:
                    if ( r[0] is None ) or not ( r[0].aligned ):
                        notaligned += 1
                        continue
                if ( r[0] and r[0].aQual < minaqual ) or ( r[1] and r[1].aQual < minaqual ):
                    lowqual += 1
                    continue
            try:
                if overlap_mode == "union":
                    fs = set()
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            fs = fs.union( fs2 )
                elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                    fs = None
                    for iv in iv_seq:
                        if iv.chrom not in features.step_vectors:
                            raise UnknownChrom
                        for fs2 in features.get_steps( iv, values_only=True ):
                            if len(fs2) > 0 or overlap_mode == "intersection-strict":
                                if fs is None:
                                    fs = fs2.copy()
                                else:
                                    fs = fs.intersection( fs2 )
                else:
                    sys.exit( "Illegal overlap mode." )
                if fs is None or len( fs ) == 0:
                    empty += 1
                elif len( fs ) > 1:
                    ambiguous += 1
                else:
                    counts[ list(fs)[0] ] += 1
            except UnknownChrom:
                if not pe_mode:
                    rr = r
                else:
                    rr = r[0] if r[0] is not None else r[1]
                if not quiet:
                    sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
                        "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
                        ( rr.read.name, iv.chrom ) )
            i += 1
            if i % 100000 == 0 and not quiet:
                sys.stderr.write( "%d reads processed.\n" % i )
    except:
        try:
            sys.stderr.write( "Error occured in %s.\n" % read_seq.get_line_number_string() )
            # For paired read this does not work. (TODO: Fix it)
        except AttributeError:
            pass
        raise
    if not quiet:
        sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
    print "too low aQual\t%d" % lowqual
    print "not aligned\t%d" % notaligned
features.add_value( f.attr[ id_attribute ], f.iv )
feature_id = f.attr[ id_attribute ]
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet ):
    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}
    for f in HTSeq.GFF_Reader( gff_filename ):
        if f.iv.chrom not in features.step_vectors.keys():
            features.add_chrom( f.iv.chrom )
        if f.type == feature_type:
            try:
                features.add_value( f.attr[ id_attribute ], f.iv )
            except KeyError:
                sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
            counts[ f.attr[ id_attribute ] ] = 0
    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    first_read = iter( HTSeq.SAM_Reader( sam_filename ) ).next()
    pe_mode = first_read.paired_end
    read_seq = HTSeq.SAM_Reader( sam_filename )
    if pe_mode:
        read_seq = HTSeq.pair_SAM_alignments( read_seq )

    empty = 0
    ambiguous = 0
    i = 0
    for r in read_seq:
        if not pe_mode:
            if not r.aligned:
                continue
            iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
        else:
            if r[0] is not None and r[0].aligned:
                iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
            else:
                iv_seq = tuple()
            if r[1] is not None and r[1].aligned:
                iv_seq = itertools.chain( iv_seq,
                    ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
            else:
                if ( r[0] is None ) or not ( r[0].aligned ):
                    continue
        try:
            if overlap_mode == "union":
                fs = set()
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        fs = fs.union( fs2 )
            elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                fs = None
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        if len(fs2) > 0 or overlap_mode == "intersection-strict":
                            if fs is None:
                                fs = fs2.copy()
                            else:
                                fs = fs.intersection( fs2 )
            else:
                sys.exit( "Illegal overlap mode." )
            if fs is None or len( fs ) == 0:
                empty += 1
            elif len( fs ) > 1:
                ambiguous += 1
            else:
                counts[ list(fs)[0] ] += 1
        except UnknownChrom:
            sys.stderr.write( ( "Warning: Skipping read '%s', aligned to %s, because " +
                "chromosome '%s' did not appear in the GFF file.\n" ) %
                ( r.read.name, r.iv, r.iv.chrom ) )
        i += 1
        if i % 100000 == 0 and not quiet:
            sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
    count_reads_in_features( args[0], args[1], opts.stranded == "yes",
        opts.mode, opts.featuretype, opts.idattr, opts.quiet )

def my_showwarning( message, category, filename, lineno = None, line = None ):
    sys.stderr.write( "Warning: %s\n" % message )

if __name__ == "__main__":
def main():
    optParser = optparse.OptionParser(
        usage = "%prog [options] sam_file gff_file",
        description =
            "This script takes an alignment file in SAM format and a " +
            "feature file in GFF format and calculates for each feature " +
            "the number of reads mapping to it. See " +
            "http://www-huber.embl.de/users/anders/HTSeq/doc/count.html for details.",
        epilog =
            "Written by Simon Anders ([email protected]), European Molecular Biology " +
            "Laboratory (EMBL). (c) 2010. Released under the terms of the GNU General " +
            "Public License v3. Part of the 'HTSeq' framework." )

    optParser.add_option( "-m", "--mode", type="choice", dest="mode",
        choices = ( "union", "intersection-strict", "intersection-nonempty" ),
        default = "union",
        help = "mode to handle reads overlapping more than one feature" +
            "(choices: union, intersection-strict, intersection-nonempty; default: union)" )
    optParser.add_option( "-t", "--type", type="string", dest="featuretype",
        default = "exon",
        help = "feature type (3rd column in GFF file) to be used, " +
            "all features of other type are ignored (default, suitable for Ensembl " +
            "GTF files: exon)" )
    optParser.add_option( "-i", "--idattr", type="string", dest="idattr",
        default = "gene_id",
        help = "GFF attribute to be used as feature ID (default, " +
            "suitable for Ensembl GTF files: gene_id)" )
    optParser.add_option( "-s", "--stranded", type="choice", dest="stranded",
        choices = ( "yes", "no" ), default = "yes",
        help = "whether the data is from a strand-specific assay (default: yes)" )
    optParser.add_option( "-q", "--quiet", action="store_true", dest="quiet",
        help = "suppress progress report" )

    if len( sys.argv ) == 1:
        optParser.print_help()
        sys.exit(1)

    (opts, args) = optParser.parse_args()

    if len( args ) != 2:
        sys.stderr.write( sys.argv[0] + ": Error: Please provide two arguments.\n" )
        sys.stderr.write( " Call with '-h' to get usage information.\n" )
        sys.exit( 1 )

    count_reads_in_features( args[0], args[1], opts.stranded == "yes",
        opts.mode, opts.featuretype, opts.idattr, opts.quiet )
main()
count_reads_in_features( args[0], args[1], opts.stranded == "yes", opts.mode, opts.featuretype, opts.idattr, opts.quiet )
def my_showwarning( message, category, filename, lineno = None, line = None ):
    sys.stderr.write( "Warning: %s\n" % message )
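my_showwarning works because the warnings module lets a program replace its display hook. A minimal Python 2 demonstration, with a made-up warning text:

import sys
import warnings

def my_showwarning( message, category, filename, lineno = None, line = None ):
    sys.stderr.write( "Warning: %s\n" % message )

warnings.showwarning = my_showwarning
warnings.warn( "malformed GFF attribute skipped" )   # -> "Warning: ..." on stderr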
sys.stderr.write( ( "Warning: Skipping read '%s', aligned to %s, because " + "chromosome '%s' did not appear in the GFF file.\n" ) % ( r.read.name, r.iv, r.iv.chrom ) )
rr = r if not pe_mode else r[0]
sys.stderr.write( ( "Warning: Skipping read '%s', because chromosome " +
    "'%s', to which it has been aligned, did not appear in the GFF file.\n" ) %
    ( rr.read.name, iv.chrom ) )
def count_reads_in_features( sam_filename, gff_filename, stranded, overlap_mode, feature_type, id_attribute, quiet ):
    features = HTSeq.GenomicArrayOfSets( [], stranded )
    counts = {}
    for f in HTSeq.GFF_Reader( gff_filename ):
        if f.iv.chrom not in features.step_vectors.keys():
            features.add_chrom( f.iv.chrom )
        if f.type == feature_type:
            try:
                feature_id = f.attr[ id_attribute ]
            except KeyError:
                sys.exit( "Feature %s does not contain a '%s' attribute" % ( f.name, id_attribute ) )
            if stranded and f.iv.strand == ".":
                sys.exit( "Feature %s at %s does not have strand information but you are "
                    "running htseq-count in stranded mode. Use '--stranded=no'." % ( f.name, f.iv ) )
            features.add_value( feature_id, f.iv )
            counts[ f.attr[ id_attribute ] ] = 0
    if len( counts ) == 0 and not quiet:
        sys.stderr.write( "Warning: No features of type '%s' found.\n" % feature_type )

    first_read = iter( HTSeq.SAM_Reader( sam_filename ) ).next()
    pe_mode = first_read.paired_end
    read_seq = HTSeq.SAM_Reader( sam_filename )
    if pe_mode:
        read_seq = HTSeq.pair_SAM_alignments( read_seq )

    empty = 0
    ambiguous = 0
    i = 0
    for r in read_seq:
        if not pe_mode:
            if not r.aligned:
                continue
            iv_seq = ( co.ref_iv for co in r.cigar if co.type == "M" )
        else:
            if r[0] is not None and r[0].aligned:
                iv_seq = ( co.ref_iv for co in r[0].cigar if co.type == "M" )
            else:
                iv_seq = tuple()
            if r[1] is not None and r[1].aligned:
                iv_seq = itertools.chain( iv_seq,
                    ( invert_strand( co.ref_iv ) for co in r[1].cigar if co.type == "M" ) )
            else:
                if ( r[0] is None ) or not ( r[0].aligned ):
                    continue
        try:
            if overlap_mode == "union":
                fs = set()
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        fs = fs.union( fs2 )
            elif overlap_mode == "intersection-strict" or overlap_mode == "intersection-nonempty":
                fs = None
                for iv in iv_seq:
                    if iv.chrom not in features.step_vectors:
                        raise UnknownChrom
                    for fs2 in features.get_steps( iv, values_only=True ):
                        if len(fs2) > 0 or overlap_mode == "intersection-strict":
                            if fs is None:
                                fs = fs2.copy()
                            else:
                                fs = fs.intersection( fs2 )
            else:
                sys.exit( "Illegal overlap mode." )
            if fs is None or len( fs ) == 0:
                empty += 1
            elif len( fs ) > 1:
                ambiguous += 1
            else:
                counts[ list(fs)[0] ] += 1
        except UnknownChrom:
            sys.stderr.write( ( "Warning: Skipping read '%s', aligned to %s, because " +
                "chromosome '%s' did not appear in the GFF file.\n" ) %
                ( r.read.name, r.iv, r.iv.chrom ) )
        i += 1
        if i % 100000 == 0 and not quiet:
            sys.stderr.write( "%d reads processed.\n" % i )

    for fn in sorted( counts.keys() ):
        print "%s\t%d" % ( fn, counts[fn] )
    print "no_feature\t%d" % empty
    print "ambiguous\t%d" % ambiguous
print "attr:", attrStr
def parse_GFF_attribute_string( attrStr, extra_return_first_value=False ):
    """Parses a GFF attribute string and returns it as a dictionary.

    If 'extra_return_first_value' is set, a pair is returned: the dictionary
    and the value of the first attribute. This might be useful if this is the ID.
    """
    if attrStr.endswith( "\n" ):
        attrStr = attrStr[:-1]
    print "attr:", attrStr
    d = {}
    for (i, attr) in itertools.izip( itertools.count(), attrStr.split( ";" ) ):
        if _re_attr_empty.match( attr ):
            continue
        if attr.count( '"' ) not in ( 0, 2 ):
            raise ValueError, "The attribute string seems to contain mismatched quotes."
        mo = _re_attr_main.match( attr )
        if not mo:
            raise ValueError, "Failure parsing GFF attribute line"
        val = mo.group(2)
        if val.startswith( '"' ) and val.endswith( '"' ):
            val = val[1:-1]
        val = urllib.unquote( val )
        d[ intern(mo.group(1)) ] = intern(val)
        if extra_return_first_value and i == 0:
            first_val = val
    if extra_return_first_value:
        return ( d, first_val )
    else:
        return d
print seqname, length
def get_sequence_lengths( self ):
    seqname = None
    seqlengths = {}
    for line in FileOrSequence.__iter__( self ):
        if line.startswith( ">" ):
            if seqname is not None:
                seqlengths[ seqname ] = length
                print seqname, length
            mo = _re_fasta_header_line.match( line )
            seqname = mo.group(1)
            length = 0
        else:
            assert seqname is not None, "FASTA file does not start with '>'."
            length += len( line.rstrip() )
    if seqname is not None:
        seqlengths[ seqname ] = length
    return seqlengths
pass
else:
    algnt = SAM_Alignment( line )
    yield algnt
continue
algnt = SAM_Alignment( line )
yield algnt
def __iter__( self ):
    for line in FileOrSequence.__iter__( self ):
        if line.startswith( "@" ):
            # do something with the header line
            pass
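The continue variant reads as "skip header lines, yield everything else". A toy version of the reader loop with made-up SAM lines shows the generator behaviour:

def alignments( lines ):
    for line in lines:
        if line.startswith( "@" ):
            continue                  # header line: skip it outright
        yield line.rstrip( "\n" )

print list( alignments( [ "@HD\tVN:1.0\n", "read1\t0\tchrI\t100\n" ] ) )
# ['read1\t0\tchrI\t100']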
def __init__( self, dict chrom_lengths, bool stranded=True ):
def __init__( self, chrom_lengths, stranded=True ):
def __init__( self, dict chrom_lengths, bool stranded=True ):
    GenomicArray.__init__( self, chrom_lengths, stranded, 'O' )
    for chrom in self.step_vectors:
        if self.stranded:
            self.step_vectors[ chrom ][ strand ][ 0 : self.chrom_lengths[chrom] ] = set()
        else:
            self.step_vectors[ chrom ][ 0 : self.chrom_lengths[chrom] ] = set()
self.step_vectors[ chrom ][ strand ][ 0 : self.chrom_lengths[chrom] ] = set()
self.step_vectors[ chrom ][ strand ][ 0 : chrom_lengths[chrom] ] = set()
def __init__( self, dict chrom_lengths, bool stranded=True ):
    GenomicArray.__init__( self, chrom_lengths, stranded, 'O' )
    for chrom in self.step_vectors:
        if self.stranded:
            self.step_vectors[ chrom ][ strand ][ 0 : self.chrom_lengths[chrom] ] = set()
        else:
            self.step_vectors[ chrom ][ 0 : self.chrom_lengths[chrom] ] = set()