Columns: rem (string, length 0 to 322k), add (string, length 0 to 2.05M), context (string, length 8 to 228k)
version=None, pdf=False, vt_tag='',parameters=''):
version=None, pdf=False, vt_tag='',parameters='', is_local=True):
def run_from_db(self, host, port, db_name, vt_id, path_to_figures, version=None, pdf=False, vt_tag='',parameters=''): self.server_logger.info("Request: run_vistrail_from_db(%s,%s,%s,%s,%s,%s,%s,%s,%s)" % \ (host, port, db_name, vt_id, path_to_figures, version, pdf, vt_tag, parameters))
def get_wf_graph_pdf(self, host, port, db_name, vt_id, version):
def get_wf_graph_pdf(self, host, port, db_name, vt_id, version, is_local=True):
def get_wf_graph_pdf(self, host, port, db_name, vt_id, version): """get_wf_graph_pdf(host:str, port:int, db_name:str, vt_id:int, version:int) -> str Returns the relative url to the generated PDF """ self.server_logger.info("get_wf_graph_pdf(%s,%s,%s,%s,%s) request received" % \ (host, port, db_name, vt_id, version)) try: vt_id = long(vt_id) version = long(version) subdir = 'workflows' filepath = os.path.join(media_dir, 'graphs', subdir) base_fname = "graph_%s_%s.pdf" % (vt_id, version) filename = os.path.join(filepath,base_fname) if ((not os.path.exists(filepath) or os.path.exists(filepath) and not os.path.exists(filename)) and self.proxies_queue is not None): #this server can send requests to other instances proxy = self.proxies_queue.get() try: result = proxy.get_wf_graph_pdf(host,port,db_name, vt_id, version) self.proxies_queue.put(proxy) self.server_logger.info("get_wf_graph_pdf returning %s"% result) return result except Exception, e: self.server_logger.error(str(e)) return (str(e), 0)
def get_wf_graph_png(self, host, port, db_name, vt_id, version):
def get_wf_graph_png(self, host, port, db_name, vt_id, version, is_local=True):
def get_wf_graph_png(self, host, port, db_name, vt_id, version): """get_wf_graph_png(host:str, port:int, db_name:str, vt_id:int, version:int) -> str Returns the relative url to the generated image """ self.server_logger.info("get_wf_graph_png(%s,%s,%s,%s,%s) request received" % \ (host, port, db_name, vt_id, version)) try: vt_id = long(vt_id) version = long(version) subdir = 'workflows' filepath = os.path.join(media_dir, 'graphs', subdir) base_fname = "graph_%s_%s.png" % (vt_id, version) filename = os.path.join(filepath,base_fname) if ((not os.path.exists(filepath) or os.path.exists(filepath) and not os.path.exists(filename)) and self.proxies_queue is not None): #this server can send requests to other instances proxy = self.proxies_queue.get() try: self.server_logger.info("Sending request to %s" % proxy) result = proxy.get_wf_graph_png(host, port, db_name, vt_id, version) self.proxies_queue.put(proxy) self.server_logger.info("returning %s" % result) return result except Exception, e: self.server_logger.error(str(e)) return (str(e), 0) #if it gets here, this means that we will execute on this instance if not os.path.exists(filepath): os.mkdir(filepath)
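Several rows above add an is_local=True keyword to the graph handlers and forward the request through the proxy queue when the cached file is not on disk. The following is a minimal, self-contained sketch of that forward-or-handle-locally pattern, not the actual server code; GraphServer, media_dir and _render_png are illustrative names, and the real handlers also do the logging, type coercion and error handling shown in the context cells.

    import os

    class GraphServer(object):
        # Hypothetical stand-in for the XML-RPC handler class in the rows above.
        def __init__(self, media_dir, proxies_queue=None):
            self.media_dir = media_dir
            self.proxies_queue = proxies_queue

        def get_wf_graph_png(self, host, port, db_name, vt_id, version, is_local=True):
            # The defaulted keyword keeps existing callers working; a forwarded
            # call passes False so the receiving instance does the work itself.
            rel_path = os.path.join('graphs', 'workflows',
                                    "graph_%s_%s.png" % (vt_id, version))
            filename = os.path.join(self.media_dir, rel_path)
            if (is_local and not os.path.exists(filename)
                    and self.proxies_queue is not None):
                proxy = self.proxies_queue.get()
                try:
                    return proxy.get_wf_graph_png(host, port, db_name,
                                                  vt_id, version, False)
                finally:
                    self.proxies_queue.put(proxy)
            self._render_png(host, port, db_name, vt_id, version, filename)
            return rel_path

        def _render_png(self, host, port, db_name, vt_id, version, filename):
            # Placeholder for the pipeline rendering performed in the real code.
            pass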
def get_vt_graph_png(self, host, port, db_name, vt_id):
def get_vt_graph_png(self, host, port, db_name, vt_id, is_local=True):
def get_vt_graph_png(self, host, port, db_name, vt_id): """get_vt_graph_png(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
result = proxy.get_vt_graph_png(host, port, db_name, vt_id)
result = proxy.get_vt_graph_png(host, port, db_name, vt_id, is_local)
def get_vt_graph_png(self, host, port, db_name, vt_id): """get_vt_graph_png(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
def getPDFWorkflowMedley(self, m_id):
def getPDFWorkflowMedley(self, m_id, is_local=True):
def getPDFWorkflowMedley(self, m_id): """getPDFWorkflowMedley(m_id:int) -> str Returns the relative url to the generated image """ self.server_logger.info("getPDFWorkflowMedley(%s) request received" % m_id) try: m_id = int(m_id) medley = self.medley_objs[m_id] except Exception, e: self.server_logger.error(str(e))
def getPNGWorkflowMedley(self, m_id):
def getPNGWorkflowMedley(self, m_id, is_local=True):
def getPNGWorkflowMedley(self, m_id): self.server_logger.info("getPNGWorkflowMedley(%s) request received" % m_id) try: m_id = int(m_id) medley = self.medley_objs[m_id] except Exception, e: self.server_logger.error(str(e))
def get_vt_tagged_versions(self, host, port, db_name, vt_id):
def get_vt_tagged_versions(self, host, port, db_name, vt_id, is_local=True):
def get_vt_tagged_versions(self, host, port, db_name, vt_id): self.server_logger.info("Request: get_vt_tagged_versions(%s,%s,%s,%s)" % \ (host, port, db_name, vt_id)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user=db_read_user, passwd=db_read_pass, obj_id=int(vt_id), obj_type=None, connection_id=None)
methodBox = self.parent().parent().parent()
methodBox = self.parent().parent().parent().parent()
def updateMethod(self): """ updateMethod() -> None Update the method values to vistrail. We only keep a monotonic version tree of the query pipeline, we can skip the actions here. """ methodBox = self.parent().parent().parent() if methodBox.controller: paramList = [] pipeline = methodBox.controller.current_pipeline f = pipeline.modules[methodBox.module.id].functions[self.fId] p = f.params for i in xrange(len(self.fields)): p[i].strValue = str(self.fields[i].editor.contents()) p[i].queryMethod = self.fields[i].selector.getCurrentMethod()
for module in self.pipeline.module_list:
for module in p.module_list:
def get_wf_vt_zip(self, host, port, db_name, vt_id, version): """get_wf_vt_zip(host:str, port:str, db_name:str, vt_id:str, version:str) -> str Returns a vt file containing the single workflow defined by version encoded as base64 string """ self.server_logger.info("Request: get_wf_vt_zip(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None) (v, _ , _) = io.load_vistrail(locator) p = v.getPipeline(long(version)) if p: vistrail = Vistrail() action_list = [] for module in self.pipeline.module_list: action_list.append(('add', module)) for connection in self.pipeline.connection_list: action_list.append(('add', connection)) action = core.db.action.create_action(action_list) vistrail.add_action(action, 0L) vistrail.addTag("Imported workflow", action.id) pipxmlstr = io.serialize(vistrail) result = base64.b64encode(pipxmlstr) else: result = "Error: Pipeline was not materialized" self.server_logger.info(result) except Exception, e: result = "Error: %s"%str(e) self.server_logger.info(result) return result
for connection in self.pipeline.connection_list:
for connection in p.connection_list:
def get_wf_vt_zip(self, host, port, db_name, vt_id, version): """get_wf_vt_zip(host:str, port:str, db_name:str, vt_id:str, version:str) -> str Returns a vt file containing the single workflow defined by version encoded as base64 string """ self.server_logger.info("Request: get_wf_vt_zip(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None) (v, _ , _) = io.load_vistrail(locator) p = v.getPipeline(long(version)) if p: vistrail = Vistrail() action_list = [] for module in self.pipeline.module_list: action_list.append(('add', module)) for connection in self.pipeline.connection_list: action_list.append(('add', connection)) action = core.db.action.create_action(action_list) vistrail.add_action(action, 0L) vistrail.addTag("Imported workflow", action.id) pipxmlstr = io.serialize(vistrail) result = base64.b64encode(pipxmlstr) else: result = "Error: Pipeline was not materialized" self.server_logger.info(result) except Exception, e: result = "Error: %s"%str(e) self.server_logger.info(result) return result
if tm_item.center != nm.center:
if tm_item.scenePos().x() != nm.center.x or \ -tm_item.scenePos().y() != nm.center.y:
def setupScene(self, pipeline): """ setupScene(pipeline: Pipeline) -> None Construct the scene to view a pipeline """ old_pipeline = self.pipeline self.pipeline = pipeline
type(subworkflow[0]) == type and
type(subworkflow[0]) == str and
def auto_add_subworkflow(self, subworkflow): if type(subworkflow) == str: return self.add_subworkflow(subworkflow) elif (type(subworkflow) == tuple and len(subworkflow) == 2 and type(subworkflow[0]) == type and type(subworkflow[1]) == dict): descriptor = self.add_subworkflow(subworkflow[0], **subworkflow[1]) return descriptor else: raise TypeError("Expected filename or (filename, kwargs)")
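The removed line compared the tuple's first element against the builtin type object, which can never match a filename string; the added line compares against str. A short sketch of the same dispatch written with isinstance checks follows; only add_subworkflow and the error message come from the row above, the rest is illustrative.

    def auto_add_subworkflow(self, subworkflow):
        # Accept either a filename or a (filename, kwargs) pair.
        if isinstance(subworkflow, str):
            return self.add_subworkflow(subworkflow)
        if (isinstance(subworkflow, tuple) and len(subworkflow) == 2
                and isinstance(subworkflow[0], str)
                and isinstance(subworkflow[1], dict)):
            filename, kwargs = subworkflow
            return self.add_subworkflow(filename, **kwargs)
        raise TypeError("Expected filename or (filename, kwargs)")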
s = d.get_port_spec(port.name, PortSpec.port_type_map.inverse[port.type])
port_type = PortSpec.port_type_map.inverse[port.type] s = reg.get_port_spec_from_descriptor(d, port.name, port_type)
def check_connection_port(port): try: s = d.get_port_spec(port.name, PortSpec.port_type_map.inverse[port.type]) # the old port spec doesn't actually exist for # the invalid module so we cannot compare! # if s <> port.spec: # msg = ("%s connection to port %s has mismatched type" % # (PortSpec.port_type_map.inverse[port.type], # port.name)) # raise UpgradeWorkflowError(msg) except Exception, e: import traceback traceback.print_exc() msg = ("%s connection to port %s does not exist." % (PortSpec.port_type_map.inverse[port.type], port.name)) raise UpgradeWorkflowError(msg)
msg = ("%s connection to port %s does not exist." % (PortSpec.port_type_map.inverse[port.type], port.name))
msg = ("%s connection to port %s of module %s " "does not exist." % \ (PortSpec.port_type_map.inverse[port.type], port.name, invalid_module.name))
def check_connection_port(port): try: s = d.get_port_spec(port.name, PortSpec.port_type_map.inverse[port.type]) # the old port spec doesn't actually exist for # the invalid module so we cannot compare! # if s <> port.spec: # msg = ("%s connection to port %s has mismatched type" % # (PortSpec.port_type_map.inverse[port.type], # port.name)) # raise UpgradeWorkflowError(msg) except Exception, e: import traceback traceback.print_exc() msg = ("%s connection to port %s does not exist." % (PortSpec.port_type_map.inverse[port.type], port.name)) raise UpgradeWorkflowError(msg)
reg_spec = d.get_port_spec(function.name, 'input')
reg_spec = reg.get_port_spec_from_descriptor(d, function.name, 'input')
def check_connection_port(port): try: s = d.get_port_spec(port.name, PortSpec.port_type_map.inverse[port.type]) # the old port spec doesn't actually exist for # the invalid module so we cannot compare! # if s <> port.spec: # msg = ("%s connection to port %s has mismatched type" % # (PortSpec.port_type_map.inverse[port.type], # port.name)) # raise UpgradeWorkflowError(msg) except Exception, e: import traceback traceback.print_exc() msg = ("%s connection to port %s does not exist." % (PortSpec.port_type_map.inverse[port.type], port.name)) raise UpgradeWorkflowError(msg)
GIT_REVISION_CMD = '%s log --pretty=format:"%%H" HEAD^..' % GIT_BASE_CMD
GIT_REVISION_CMD = "%s rev-parse HEAD" % GIT_BASE_CMD
pattern = re.compile(r"(^\s*?def vistrails_revision\(\):.*?release = ['\"])([a-fA-F0-9]+?)(['\"].*?return release)", re.DOTALL | re.MULTILINE)
REVISION = revision_proc.communicate()[0]
REVISION = (revision_proc.communicate()[0]).strip()
def errexit(errcodeobj, showtrace=True, *args): errcode = errcodeobj[0] errmsg = errcodeobj[1] if args: errmsg = errmsg % args if showtrace: errio = StringIO() errio.write(errmsg + "\n") traceio = StringIO() traceback.print_exc(file=traceio) traceio.seek(0) errio.writelines([INDENT + line for line in traceio.readlines()]) errio.seek(0) errmsg = errio.read() error(errmsg) exit(errcode)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_wf_modules(self, host, port, db_name, vt_id, version): """get_wf_modules(host:str, port:int, db_name:str, vt_id:int, version:int) -> list of dict Returns a list of information about the modules used in a workflow in a list of dictionaries. The dictionary has the following keys: name, package, documentation. """ self.server_logger.info("Request: get_wf_modules(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
name=filename, user=user, passwd='r3P0:4uTH')
name=filename, user=db_write_user, passwd=db_write_pass)
def add_vt_to_db(self, host, port, db_name, user, vt_filepath, filename, repository_vt_id, repository_creator): """add_vt_to_db(host:str, port:int, db_name:str, user:str, vt_filepath:str, filename:str, repository_vt_id:int, repository_creator:str) -> int This will add a vistrail in vt_filepath to the the database. Before adding it it will annotate the vistrail with the repository_vt_id and repository_creator. """ try: locator = ZIPFileLocator(vt_filepath).load() if repository_vt_id != -1: vistrail = locator.vistrail vistrail.set_annotation('repository_vt_id', repository_vt_id) vistrail.set_annotation('repository_creator', repository_creator) #print "name=%s"%filename db_locator = DBLocator(host=host, port=int(port), database=db_name, name=filename, user=user, passwd='r3P0:4uTH') #print "db_locator %s" % db_locator db_locator.save_as(locator) return db_locator.obj_id except Exception, e: self.server_logger.info("Error: %s"%str(e)) return "FAILURE: %s" %str(e)
config['user'] = user config['passwd'] = 'pass'
config['user'] = db_write_user config['passwd'] = db_write_pass
def remove_vt_from_db(self, host, port, db_name, user, vt_id): """remove_vt_from_db(host:str, port:int, db_name:str, user:str, vt_id:int) -> 0 or 1 Remove a vistrail from the repository """ config = {} config['host'] = host config['port'] = int(port) config['db'] = db_name config['user'] = user config['passwd'] = 'pass' try: conn = db.services.io.open_db_connection(config) db.services.io.delete_entity_from_db(conn,'vistrail', vt_id) db.services.io.close_db_connection(conn) return 1 except Exception, e: self.server_logger.info("Error: %s"%str(e)) if conn: db.services.io.close_db_connection(conn) return "FAILURE: %s" %str(e)
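Many of the credential rows in this dump replace literal user/password strings with module-level names such as db_read_user, db_read_pass, db_write_user and db_write_pass. Those names come from the added lines; how they are populated is not shown, so the following is only one plausible sketch, with the config file name and section invented for illustration.

    try:
        from configparser import ConfigParser      # Python 3
    except ImportError:
        from ConfigParser import ConfigParser      # Python 2

    _cfg = ConfigParser()
    _cfg.read('server.cfg')                        # hypothetical config file
    db_read_user = _cfg.get('database', 'read_user')
    db_read_pass = _cfg.get('database', 'read_password')
    db_write_user = _cfg.get('database', 'write_user')
    db_write_pass = _cfg.get('database', 'write_password')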
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_runnable_workflows(self, host, port, db_name, vt_id): print "get_runnable_workflows" try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_wf_datasets(self, host, port, db_name, vt_id, version): print 'get workflow datasets' self.server_logger.info("Request: get_wf_datasets(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
user='repository', passwd='r3P0:4uTH',
user=db_write_user, passwd=db_write_pass,
def run_from_db(self, host, port, db_name, vt_id, path_to_figures, version=None, vt_tag='', parameters=''):
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_tag_version(self, host, port, db_name, vt_id, vt_tag): self.server_logger.info("Request: get_tag_version(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, vt_tag)) version = -1 try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_vt_xml(self, host, port, db_name, vt_id): self.server_logger.info("Request: get_vt_xml(%s,%s,%s,%s)"%(host, port, db_name, vt_id)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_wf_xml(self, host, port, db_name, vt_id, version): self.server_logger.info("Request: get_wf_xml(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
def getPDFWorkflow(self, m_id): self.server_logger.info( "getPDFWorkflow(%s) request received"%m_id) print "getPDFWorkflow(%s) request received"%m_id try: m_id = int(m_id) medley = self.medley_objs[m_id] except Exception, e: print str(e) try: locator = DBLocator(host=db_host, port=3306, database='vistrails', user='vtserver', passwd='', obj_id=medley._vtid, obj_type=None, connection_id=None) version = long(medley._version) subdir = os.path.join('workflows', hashlib.sha224("%s_%s"%(str(locator),version)).hexdigest()) filepath = os.path.join('/server/crowdlabs/site_media/media/medleys/images', subdir) base_fname = "%s_%s.pdf" % (str(locator.short_name), version) filename = os.path.join(filepath,base_fname) if ((not os.path.exists(filepath) or os.path.exists(filepath) and not os.path.exists(filename)) and self.proxies_queue is not None): proxy = self.proxies_queue.get() try: print "Sending request to ", proxy result = proxy.getPDFWorkflow(m_id) self.proxies_queue.put(proxy) print "returning %s"% result self.server_logger.info("returning %s"% result) return result except Exception, e: print "Exception: ", str(e) return "" if not os.path.exists(filepath): os.mkdir(filepath) if not os.path.exists(filename): (v, abstractions , thumbnails) = io.load_vistrail(locator) controller = VistrailController() controller.set_vistrail(v, locator, abstractions, thumbnails) controller.change_selected_version(version) print medley._vtid, " ", medley._version p = controller.current_pipeline from gui.pipeline_view import QPipelineView pipeline_view = QPipelineView() pipeline_view.scene().setupScene(p) pipeline_view.scene().saveToPDF(filename) del pipeline_view else: print "found cached pdf: ", filename return os.path.join(subdir,base_fname) except Exception, e: print "Error when saving pdf: ", str(e) def getPNGWorkflow(self, m_id): self.server_logger.info( "getPNGWorkflow(%s) request received"%m_id) print "getPNGWorkflow(%s) request received"%m_id try: m_id = int(m_id) medley = self.medley_objs[m_id] except Exception, e: print str(e) try: locator = DBLocator(host=db_host, port=3306, database='vistrails', user='vtserver', passwd='', obj_id=medley._vtid, obj_type=None, connection_id=None) version = long(medley._version) subdir = os.path.join('workflows', hashlib.sha224("%s_%s"%(str(locator),version)).hexdigest()) filepath = os.path.join('/server/crowdlabs/site_media/media/medleys/images', subdir) base_fname = "%s_%s.png" % (str(locator.short_name), version) filename = os.path.join(filepath,base_fname) if ((not os.path.exists(filepath) or os.path.exists(filepath) and not os.path.exists(filename)) and self.proxies_queue is not None): proxy = self.proxies_queue.get() try: print "Sending request to ", proxy result = proxy.getPNGWorkflow(m_id) self.proxies_queue.put(proxy) print "returning %s"% result self.server_logger.info("returning %s"% result) return result except Exception, e: print "Exception: ", str(e) return "" if not os.path.exists(filepath): os.mkdir(filepath) if not os.path.exists(filename): (v, abstractions , thumbnails) = io.load_vistrail(locator) controller = VistrailController() controller.set_vistrail(v, locator, abstractions, thumbnails) controller.change_selected_version(version) print medley._vtid, " ", medley._version p = controller.current_pipeline from gui.pipeline_view import QPipelineView pipeline_view = QPipelineView() pipeline_view.scene().setupScene(p) pipeline_view.scene().saveToPNG(filename) del pipeline_view else: print "Found cached image: ", filename return os.path.join(subdir,base_fname) 
except Exception, e: print "Error when saving png: ", str(e)
def get_wf_xml(self, host, port, db_name, vt_id, version): self.server_logger.info("Request: get_wf_xml(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_vt_zip(self, host, port, db_name, vt_id): """get_vt_zip(host:str, port: str, db_name: str, vt_id:str) -> str Returns a .vt file encoded as base64 string """ self.server_logger.info("Request: get_vt_zip(%s,%s,%s,%s)"%(host, port, db_name, vt_id)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None) save_bundle = locator.load() #create temporary file (fd, name) = tempfile.mkstemp(prefix='vt_tmp', suffix='.vt') os.close(fd) fileLocator = FileLocator(name) fileLocator.save(save_bundle) contents = open(name).read() result = base64.b64encode(contents) os.unlink(name) self.server_logger.info("SUCCESS!") return result except Exception, e: self.server_logger.info("Error: %s"%str(e)) return "FAILURE: %s" %str(e)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_wf_vt_zip(self, host, port, db_name, vt_id, version): """get_wf_vt_zip(host:str, port:str, db_name:str, vt_id:str, version:str) -> str Returns a vt file containing the single workflow defined by version encoded as base64 string """ self.server_logger.info("Request: get_wf_vt_zip(%s,%s,%s,%s,%s)"%(host, port, db_name, vt_id, version)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None) (v, _ , _) = io.load_vistrail(locator) p = v.getPipeline(long(version)) if p: vistrail = Vistrail() action_list = [] for module in p.module_list: action_list.append(('add', module)) for connection in p.connection_list: action_list.append(('add', connection)) action = core.db.action.create_action(action_list) vistrail.add_action(action, 0L) vistrail.addTag("Imported workflow", action.id) pipxmlstr = io.serialize(vistrail) result = base64.b64encode(pipxmlstr) else: result = "Error: Pipeline was not materialized" self.server_logger.info(result) except Exception, e: result = "Error: %s"%str(e) self.server_logger.info(result) return result
config['user'] = 'vtserver' config['passwd'] = ''
config['user'] = db_read_user config['passwd'] = db_read_pass
def get_db_vt_list(self, host, port, db_name): self.server_logger.info("Request: get_db_vistrail_list(%s,%s,%s)"%(host, port, db_name)) config = {} config['host'] = host config['port'] = int(port) config['db'] = db_name config['user'] = 'vtserver' config['passwd'] = '' try: rows = io.get_db_vistrail_list(config) return rows except Exception, e: self.server_logger.info("Error: %s"%str(e)) return "FAILURE: %s" %str(e)
config['user'] = 'vtserver' config['passwd'] = ''
config['user'] = db_read_user config['passwd'] = db_read_pass
def get_db_vt_list_xml(self, host, port, db_name): self.server_logger.info("Request: get_db_vistrail_list(%s,%s,%s)"%(host, port, db_name)) config = {} config['host'] = host config['port'] = int(port) config['db'] = db_name config['user'] = 'vtserver' config['passwd'] = '' try: rows = io.get_db_vistrail_list(config) result = '<vistrails>' for (id, name, mod_time) in rows: result += '<vistrail id="%s" name="%s" mod_time="%s" />'%(id,name,mod_time) result += '</vistrails>' return result except Exception, e: self.server_logger.info("Error: %s"%str(e)) return "FAILURE: %s" %str(e)
user='vtserver', passwd='',
user=db_read_user, passwd=db_read_pass,
def get_vt_tagged_versions(self, host, port, db_name, vt_id): self.server_logger.info("Request: get_vt_tagged_versions(%s,%s,%s,%s)"%(host, port, db_name, vt_id)) try: locator = DBLocator(host=host, port=int(port), database=db_name, user='vtserver', passwd='', obj_id=int(vt_id), obj_type=None, connection_id=None)
self.perm_view.setEnabled(False) self.perm_edit.setEnabled(False) self.perm_download.setEnabled(False)
self._default_perm_label.setText("Default Global Permissions " "(only applicable to branching):")
def check_dependencies(self): """ determines if current VisTrail will be supported by the repository's VisTrail server """
def push_vistrail_to_repository(self):
def push_vistrail_to_repository(self, branching=False):
def push_vistrail_to_repository(self): """ uploads current VisTrail to web repository """
'repository_vt_id': repository_vt_id,
'repository_vt_id': repository_vt_id if not branching else -1,
def push_vistrail_to_repository(self): """ uploads current VisTrail to web repository """
"""
controller.recompute_terse_graph() controller.invalidate_version_tree()
def push_vistrail_to_repository(self): """ uploads current VisTrail to web repository """
"""
def push_vistrail_to_repository(self): """ uploads current VisTrail to web repository """
self.dialog.cookiejar = cookielib.CookieJar()
self.dialog.cookiejar = cookielib.CookieJar() self.dialog.cookie_url = self.config.webRepositoryURL
def clicked_on_login(self): """ Attempts to log into web repository stores auth cookie for session """ from gui.application import VistrailsApplication
self._push_tab = self.create_push_tab()
def __init__(self, parent): QtGui.QDialog.__init__(self, parent) self._status_bar = QtGui.QStatusBar(self) self.setWindowTitle('Web Repository Options') layout = QtGui.QHBoxLayout(self) layout.setMargin(0) layout.setSpacing(0) self.setLayout(layout)
def handle_missing_module(m_name, m_namespace):
def handle_missing_module(*args, **kwargs):
def handle_missing_module(m_name, m_namespace): global webServicesmodulesDict def get_wsdl_from_namespace(m_namespace): try: wsdl = m_namespace.split("|") return wsdl[0] except: print "invalid namespace" return None wsdl = get_wsdl_from_namespace(m_namespace) if wsdl: outdated_list = [] updated_list = [] error_list = [] print "Downloading %s and adding to the Module list..."%wsdl pathfile = os.path.join(core.system.default_dot_vistrails(), "webServices", "modules.conf") if os.path.isfile(pathfile): #Verify if there is a need to update the modules configuration #file (modules.conf) (outdated_list, updated_list, error_list) = verify_wsdl([wsdl]) #print "verified: createconfig file is %s"%createconfigfile else: #If the modules configuration file doesn't exist, create it outdated_list = [wsdl] #If the stub files are not updated or there is not information in # the header about the modification date of the web service, the # stubs files and a modules configuration file will be created # otherwise the information of the modules will be obtained from # the modules.conf files that contains serialized data of the methods # and the complex types of the web services # print outdated_list, updated_list, error_list if os.path.isfile(pathfile): try: inf = open(pathfile) webServicesmodulesDict = cPickle.load(inf) inf.close() except: print "Error loading configuration file" return False try: (res,not_loaded) = load_wsdl_no_config(updated_list) #print "done loading_no_config" if not res: outdated_list.extend([wsdl]) (res, not_loaded) = load_wsdl_with_config(outdated_list) #print "done loading_with_config" if res: #add new url to package config file wsdlList = [] if configuration.check('wsdlList'): wsdlList = configuration.wsdlList.split(";") if wsdl not in wsdlList: wsdlList.append(wsdl) swsdlList = ";".join(wsdlList) configuration.wsdlList = swsdlList print "done." return True else: msg = """ There were problems loading the webservice.
m = pipeline.modules[m_id] m_namespace = m.namespace
def get_wsdl_from_namespace(m_namespace): try: wsdl = m_namespace.split("|") return wsdl[0] except: print "invalid namespace" return None
if not cellWidget.label.hasScaledContents():
if (not cellWidget.label.hasScaledContents() and not cellWidget._playing):
def updateStatus(self, info): """ updateStatus(info: tuple) -> None Updates the status of the button based on the input info """ (sheet, row, col, cellWidget) = info if cellWidget: if not cellWidget.label.hasScaledContents(): self.setEnabled(True) originalWidth = cellWidget.originalPix.width() self.setValue(cellWidget.label.pixmap().width()*100/originalWidth) else: self.setEnabled(False) self.setValue(100)
self.flush_move_actions()
self.flush_delayed_actions()
def create_group(self, module_ids, connection_ids): self.flush_move_actions() (group, connections) = \ BaseController.create_group(self, self.current_pipeline, module_ids, connection_ids) op_list = [] op_list.extend(('delete', self.current_pipeline.connections[c_id]) for c_id in connection_ids) op_list.extend(('delete', self.current_pipeline.modules[m_id]) for m_id in module_ids) op_list.append(('add', group)) op_list.extend(('add', c) for c in connections) action = core.db.action.create_action(op_list) self.add_new_action(action)
self.flush_move_actions()
self.flush_delayed_actions()
def create_abstraction(self, module_ids, connection_ids, name): self.flush_move_actions() (abstraction, connections) = \ BaseController.create_abstraction(self, self.current_pipeline, module_ids, connection_ids, name) op_list = [] op_list.extend(('delete', self.current_pipeline.connections[c_id]) for c_id in connection_ids) op_list.extend(('delete', self.current_pipeline.modules[m_id]) for m_id in module_ids) op_list.append(('add', abstraction)) op_list.extend(('add', c) for c in connections) action = core.db.action.create_action(op_list) self.add_new_action(action) result = self.perform_action(action) return abstraction
self.flush_move_actions()
self.flush_delayed_actions()
def create_abstraction_from_group(self, group_id, name=""): self.flush_move_actions() name = self.get_abstraction_name(name) (abstraction, connections) = \ BaseController.create_abstraction_from_group(self, self.current_pipeline, group_id, name)
self.flush_move_actions()
self.flush_delayed_actions()
def ungroup_set(self, module_ids): self.flush_move_actions() for m_id in module_ids: self.create_ungroup(m_id)
self.flush_move_actions()
self.flush_delayed_actions()
def update_notes(self, notes): """ Parameters ----------
self.flush_move_actions()
self.flush_delayed_actions()
def execute_current_workflow(self, custom_aliases=None): """ execute_current_workflow() -> None Execute the current workflow (if exists) """ self.flush_move_actions() if self.current_pipeline: locator = self.get_locator() if locator: locator.clean_temporaries() locator.save_temporary(self.vistrail) self.execute_workflow_list([(self.locator, self.current_version, self.current_pipeline, self.current_pipeline_view, custom_aliases, None)])
self.flush_move_actions()
self.flush_delayed_actions()
def update_current_tag(self,tag): """ update_current_tag(tag: str) -> Bool Update the current vistrail tag and return success predicate """ self.flush_move_actions() try: if self.vistrail.hasTag(self.current_version): self.vistrail.changeTag(tag, self.current_version) else: self.vistrail.addTag(tag, self.current_version) except TagExists: show_warning('Name Exists', "There is already another version named '%s'.\n" "Please enter a different one." % tag) return False self.set_changed(True) self.recompute_terse_graph() self.invalidate_version_tree(False) return True
self.flush_move_actions()
self.flush_delayed_actions()
def copy_modules_and_connections(self, module_ids, connection_ids): """copy_modules_and_connections(module_ids: [long], connection_ids: [long]) -> str Serializes a list of modules and connections """ self.flush_move_actions()
self.flush_move_actions()
self.flush_delayed_actions()
def paste_modules_and_connections(self, str, center): """ paste_modules_and_connections(str, center: (float, float)) -> [id list] Paste a list of modules and connections into the current pipeline.
if op.vtType == 'add': f(op.data, op.parentObjId, op.parentObjType) elif op.vtType == 'delete': f(op.objectId, op.parentObjId, op.parentObjType) elif op.vtType == 'change': f(op.oldObjId, op.data, op.parentObjId, op.parentObjType)
def perform_operation(self, op): # print "doing %s %s %s" % (op.id, op.vtType, op.what) if op.db_what == 'abstraction' or op.db_what == 'group': what = 'module' else: what = op.db_what funname = '%s_%s' % (op.vtType, what)
if op.vtType == 'add': f(op.data, op.parentObjType, op.parentObjId) elif op.vtType == 'delete': f(op.objectId, op.what, op.parentObjType, op.parentObjId) elif op.vtType == 'change': f(op.oldObjId, op.data, op.parentObjType, op.parentObjId)
def perform_operation(self, op): # print "doing %s %s %s" % (op.id, op.vtType, op.what) if op.db_what == 'abstraction' or op.db_what == 'group': what = 'module' else: what = op.db_what funname = '%s_%s' % (op.vtType, what)
msg = "Pipeline cannot execute '%s' operation" % op.vtType
msg = "Pipeline cannot execute '%s %s' operation" % \ (op.vtType, op.what)
def perform_operation(self, op): # print "doing %s %s %s" % (op.id, op.vtType, op.what) if op.db_what == 'abstraction' or op.db_what == 'group': what = 'module' else: what = op.db_what funname = '%s_%s' % (op.vtType, what)
raise VistrailsInternalError("duplicate module id")
raise VistrailsInternalError("duplicate module id: %d" % m.id )
def add_module(self, m, *args): """add_module(m: Module) -> None Add new module to pipeline """ if self.has_module_with_id(m.id): raise VistrailsInternalError("duplicate module id")
def add_parameter(self, param, parent_id, parent_type=ModuleFunction.vtType):
def add_parameter(self, param, parent_type, parent_id):
def add_parameter(self, param, parent_id, parent_type=ModuleFunction.vtType): self.db_add_object(param, parent_type, parent_id) if not self.has_alias(param.alias): self.change_alias(param.alias, param.vtType, param.real_id, parent_type, parent_id, None)
def delete_parameter(self, param_id, parent_id, parent_type=ModuleFunction.vtType):
def delete_parameter(self, param_id, param_type, parent_type, parent_id):
def delete_parameter(self, param_id, parent_id, parent_type=ModuleFunction.vtType): self.db_delete_object(param_id, ModuleParam.vtType, parent_type, parent_id) self.remove_alias(ModuleParam.vtType, param_id, parent_type, parent_id, None)
self.remove_alias(ModuleParam.vtType, param_id, parent_type, parent_id, None) def change_parameter(self, old_param_id, param, parent_id, parent_type=ModuleFunction.vtType):
self.remove_alias(ModuleParam.vtType, param_id, parent_type, parent_id, None) def change_parameter(self, old_param_id, param, parent_type, parent_id):
def delete_parameter(self, param_id, parent_id, parent_type=ModuleFunction.vtType): self.db_delete_object(param_id, ModuleParam.vtType, parent_type, parent_id) self.remove_alias(ModuleParam.vtType, param_id, parent_type, parent_id, None)
def add_port(self, port, parent_id, parent_type=Connection.vtType):
def add_port(self, port, parent_type, parent_id):
def add_port(self, port, parent_id, parent_type=Connection.vtType): self.db_add_object(port, parent_type, parent_id) connection = self.connections[parent_id] if connection.source is not None and \ connection.destination is not None: self.graph.add_edge(connection.sourceId, connection.destinationId, connection.id)
def delete_port(self, port_id, parent_id, parent_type=Connection.vtType):
def delete_port(self, port_id, port_type, parent_type, parent_id):
def delete_port(self, port_id, parent_id, parent_type=Connection.vtType): connection = self.connections[parent_id] if len(connection.ports) >= 2: self.graph.delete_edge(connection.sourceId, connection.destinationId, connection.id) self.db_delete_object(port_id, Port.vtType, parent_type, parent_id)
def change_port(self, old_port_id, port, parent_id, parent_type=Connection.vtType):
def change_port(self, old_port_id, port, parent_type, parent_id):
def change_port(self, old_port_id, port, parent_id, parent_type=Connection.vtType): connection = self.connections[parent_id] if len(connection.ports) >= 2: source_list = self.graph.adjacency_list[connection.sourceId] source_list.remove((connection.destinationId, connection.id)) dest_list = \ self.graph.inverse_adjacency_list[connection.destinationId] dest_list.remove((connection.sourceId, connection.id)) self.db_change_object(old_port_id, port, parent_type, parent_id) if len(connection.ports) >= 2: source_list = self.graph.adjacency_list[connection.sourceId] source_list.append((connection.destinationId, connection.id)) dest_list = \ self.graph.inverse_adjacency_list[connection.destinationId] dest_list.append((connection.sourceId, connection.id))
def add_portSpec(self, port_spec, parent_id, parent_type=Module.vtType):
def add_portSpec(self, port_spec, parent_type, parent_id):
def add_portSpec(self, port_spec, parent_id, parent_type=Module.vtType): # self.db_add_object(port_spec, parent_type, parent_id) self.add_port_to_registry(port_spec, parent_id)
def delete_portSpec(self, spec_id, parent_id, parent_type=Module.vtType):
def delete_portSpec(self, spec_id, portSpec_type, parent_type, parent_id):
def delete_portSpec(self, spec_id, parent_id, parent_type=Module.vtType): self.delete_port_from_registry(spec_id, parent_id) # self.db_delete_object(spec_id, PortSpec.vtType, parent_type, parent_id)
def change_portSpec(self, old_spec_id, port_spec, parent_id, parent_type=Module.vtType):
def change_portSpec(self, old_spec_id, port_spec, parent_type, parent_id):
def change_portSpec(self, old_spec_id, port_spec, parent_id, parent_type=Module.vtType): self.delete_port_from_registry(old_spec_id, parent_id) # self.db_change_object(old_spec_id, port_spec, parent_type, parent_id) self.add_port_to_registry(port_spec, parent_id)
striplen = len("<paramexps>") xmlString = xmlString[striplen:-(striplen+1)].strip() self.setParameterExploration(xmlString)
if xmlString is not None: striplen = len("<paramexps>") xmlString = xmlString[striplen:-(striplen+1)].strip() self.setParameterExploration(xmlString)
def showEvent(self, event): """ showEvent(event: QShowEvent) -> None Update the tab when it is shown """ if self.currentVersion!=self.controller.current_version: self.currentVersion = self.controller.current_version # Update the virtual cell pipeline = self.controller.current_pipeline self.virtualCell.updateVirtualCell(pipeline)
cmdline = ['unzip', '-q','-o','-d', vt_save_dir, shell_quote(filename)]
cmdline = ['unzip', '-q','-o','-d', vt_save_dir, mkarg(filename).strip()]
def open_vistrail_bundle_from_zip_xml(filename): """open_vistrail_bundle_from_zip_xml(filename) -> SaveBundle Open a vistrail from a zip compressed format. It expects that the vistrail file inside archive has name 'vistrail', the log inside archive has name 'log', abstractions inside archive have prefix 'abstraction_', and thumbnails inside archive are '.png' files in 'thumbs' dir """ core.requirements.require_executable('unzip') vt_save_dir = tempfile.mkdtemp(prefix='vt_save') output = [] cmdline = ['unzip', '-q','-o','-d', vt_save_dir, shell_quote(filename)] result = execute_cmdline(cmdline, output) if result != 0 and len(output) != 0: raise VistrailsDBException("Unzip of '%s' failed" % filename) vistrail = None log = None log_fname = None abstraction_files = [] unknown_files = [] thumbnail_files = [] try: for root, dirs, files in os.walk(vt_save_dir): for fname in files: if fname == 'vistrail' and root == vt_save_dir: vistrail = open_vistrail_from_xml(os.path.join(root, fname)) elif fname == 'log' and root == vt_save_dir: # FIXME read log to get execution info # right now, just ignore the file log = None log_fname = os.path.join(root, fname) # log = open_log_from_xml(os.path.join(root, fname)) # objs.append(DBLog.vtType, log) elif fname.startswith('abstraction_'): abstraction_file = os.path.join(root, fname) abstraction_files.append(abstraction_file) elif (fname.endswith('.png') and root == os.path.join(vt_save_dir,'thumbs')): thumbnail_file = os.path.join(root, fname) thumbnail_files.append(thumbnail_file) else: unknown_files.append(os.path.join(root, fname)) except OSError, e: raise VistrailsDBException("Error when reading vt file") if len(unknown_files) > 0: raise VistrailsDBException("Unknown files in vt file: %s" % \ unknown_files) if vistrail is None: raise VistrailsDBException("vt file does not contain vistrail") vistrail.db_log_filename = log_fname save_bundle = SaveBundle(DBVistrail.vtType, vistrail, log, abstractions=abstraction_files, thumbnails=thumbnail_files) return (save_bundle, vt_save_dir)
def create_action_from_ops(ops):
def create_action_from_ops(ops, simplify=False):
def create_action_from_ops(ops): if len(ops) > 0: from core.vistrail.action import Action action = db.services.action.create_action_from_ops(ops) Action.convert(action) return action return None
action = db.services.action.create_action_from_ops(ops)
action = db.services.action.create_action_from_ops(ops, simplify)
def create_action_from_ops(ops): if len(ops) > 0: from core.vistrail.action import Action action = db.services.action.create_action_from_ops(ops) Action.convert(action) return action return None
self.server_logger.debug(4)
def get_vt_graph_png(self, host, port, db_name, vt_id): """get_vt_graph_png(host:str, port: str, db_name: str, vt_id:str) -> str Returns the relative url of the generated image """
except ImportError, e: self._init_module = self._module
def initialize(self, existing_paths=None): self._override_import(existing_paths) try: try: name = self.prefix + self.codepath + '.init' __import__(name, globals(), locals(), []) self._init_module = sys.modules[name] self._imported_paths.add(name) # Copy attributes (shallow) from _module into _init_module's namespace and point _module to _init_module module_attributes = ['identifier', 'name', 'version', 'configuration', 'package_dependencies', 'package_requirements'] for attr in module_attributes: if hasattr(self._module, attr): setattr(self._init_module, attr, getattr(self._module, attr)) self._module = self._init_module except ImportError, e: self._init_module = self._module
self.display(RichTextCellWidget, (fileValue,))
self.cellWidget = self.displayAndWait(RichTextCellWidget, (fileValue,))
def compute(self): """ compute() -> None Dispatch the HTML contents to the spreadsheet """ if self.hasInputFromPort("File"): fileValue = self.getInputFromPort("File") else: fileValue = None self.display(RichTextCellWidget, (fileValue,))
if m1.db_tag != m2.db_tag: return True
def module_text_has_changed(self, m1, m2): # 2008-06-25 cscheid # This is a hot-path for QPipelineView.setupScene, so we cut # some corners # # 2008-07-08 cscheid # Can't use direct __dict__ lookup because both groups and # modules end up here. if m1.db_tag != m2.db_tag: return True m1_has = '__desc__' in m1.db_annotations_key_index if (m1_has != '__desc__' in m2.db_annotations_key_index): return True if (m1_has and # m2_has, since m1_has and previous condition m1.db_annotations_key_index['__desc__'].value.strip()!= m2.db_annotations_key_index['__desc__'].value.strip()): return True return False
if (m1_has != '__desc__' in m2.db_annotations_key_index):
if m1_has != ('__desc__' in m2.db_annotations_key_index):
def module_text_has_changed(self, m1, m2): # 2008-06-25 cscheid # This is a hot-path for QPipelineView.setupScene, so we cut # some corners # # 2008-07-08 cscheid # Can't use direct __dict__ lookup because both groups and # modules end up here. if m1.db_tag != m2.db_tag: return True m1_has = '__desc__' in m1.db_annotations_key_index if (m1_has != '__desc__' in m2.db_annotations_key_index): return True if (m1_has and # m2_has, since m1_has and previous condition m1.db_annotations_key_index['__desc__'].value.strip()!= m2.db_annotations_key_index['__desc__'].value.strip()): return True return False
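The parentheses added above matter because !=, == and in are all comparison operators in Python and therefore chain: m1_has != '__desc__' in m2.db_annotations_key_index parses as (m1_has != '__desc__') and ('__desc__' in m2.db_annotations_key_index), and since a bool never equals the string '__desc__' the left half is always true. A tiny stand-alone demonstration, with plain dicts in place of the annotation indexes:

    m1_index = {'__desc__': 'some notes'}
    m2_index = {}
    m1_has = '__desc__' in m1_index                    # True

    chained = m1_has != '__desc__' in m2_index
    # Chained comparison: (m1_has != '__desc__') and ('__desc__' in m2_index)
    # -> True and False -> False, even though only one module has a description.
    intended = m1_has != ('__desc__' in m2_index)      # True, as the fix intends
    print(chained, intended)                           # False True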
print str(e)
debug.critical(str(e))
def check_user_projects(self): # are we logged in? if not self.dialog.cookiejar: self._repository_status['support_status'] = "Please login" self._repository_status['details'] = "" self.update_push_information() self._push_button.setEnabled(False) self._branch_button.hide() else: server_url = "%s/projects/get_user_projects/" % \ self.config.webRepositoryURL register_openers(cookiejar=self.dialog.cookiejar) try: request = urllib2.Request(server_url) get_servers = urllib2.urlopen(request) except urllib2.HTTPError, e: self._repository_status['support_status'] = "" self._repository_status['details'] = "" if e.code == 500: self._repository_status['support_status'] = \ ("Error connecting to repository (server side issues)") else: print str(e)
self.serverCombo.addItem("%s (%s)" % (prj, srvr), srvr)
self.serverCombo.addItem("%s (%s)" % (prj, srvr), [prj, srvr])
def check_user_projects(self): # are we logged in? if not self.dialog.cookiejar: self._repository_status['support_status'] = "Please login" self._repository_status['details'] = "" self.update_push_information() self._push_button.setEnabled(False) self._branch_button.hide() else: server_url = "%s/projects/get_user_projects/" % \ self.config.webRepositoryURL register_openers(cookiejar=self.dialog.cookiejar) try: request = urllib2.Request(server_url) get_servers = urllib2.urlopen(request) except urllib2.HTTPError, e: self._repository_status['support_status'] = "" self._repository_status['details'] = "" if e.code == 500: self._repository_status['support_status'] = \ ("Error connecting to repository (server side issues)") else: print str(e)
server = self.serverCombo.itemData(index).toString()
server = self.serverCombo.itemData(index).toList()[1].toString()
def check_dependencies(self, index): """ determines if current VisTrail will be supported by the repository's VisTrail server """
print str(e)
debug.critical(str(e))
def check_dependencies(self, index): """ determines if current VisTrail will be supported by the repository's VisTrail server """
server_packages = get_supported_packages.read().split("||")
server_packages = json.loads(get_supported_packages.read())
def check_dependencies(self, index): """ determines if current VisTrail will be supported by the repository's VisTrail server """
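The removed line split the server's reply on "||" while the added line parses it as JSON, so the repository presumably returns a JSON-encoded list of supported packages. A hedged sketch of that request-and-parse step; the endpoint path is an assumption (the context only shows the get_user_projects URL), and the urllib fallback matches the Python 2 code in the surrounding rows.

    import json
    try:
        from urllib.request import urlopen            # Python 3
    except ImportError:
        from urllib2 import urlopen                   # Python 2

    def fetch_supported_packages(repository_url):
        # Hypothetical endpoint name; the real URL is not shown in these rows.
        reply = urlopen("%s/projects/get_supported_packages/" % repository_url)
        # Expected shape: a JSON list such as ["edu.utah.sci.vistrails.basic", ...]
        return json.loads(reply.read())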
print "the vistrail has been deleted or doesn't exist"
def check_dependencies(self, index): """ determines if current VisTrail will be supported by the repository's VisTrail server """
project = self.serverCombo.itemData(self.serverCombo.currentIndex()).toList()[0].toString() if project == "Default": project = ""
def push_vistrail_to_repository(self, branching=False): """ uploads current VisTrail to web repository """
print "before check"
def push_vistrail_to_repository(self, branching=False): """ uploads current VisTrail to web repository """
print "getting version from web"
debug.log("getting version from web")
def push_vistrail_to_repository(self, branching=False): """ uploads current VisTrail to web repository """
print e
debug.critical(str(e))
def push_vistrail_to_repository(self, branching=False): """ uploads current VisTrail to web repository """
print "repo login is %s" % self.config.webRepositoryLogin
def __init__(self, parent, status_bar, dialog): QtGui.QWidget.__init__(self, parent) self._status_bar = status_bar self.dialog = dialog
print self.saveLogin.checkState()
print "save login state: ", self.saveLogin.checkState()
def clicked_on_login(self): """ Attempts to log into web repository stores auth cookie for session """ from gui.application import VistrailsApplication
print "settings repo login"
print "setting repo login"
def clicked_on_login(self): """ Attempts to log into web repository stores auth cookie for session """ from gui.application import VistrailsApplication
debug.critical("Couldn't start the instance on display: %s port: %s" % (virtual_display, port)
debug.critical("Couldn't start the instance on display: %s port: %s" % (virtual_display, port))
def start_other_instances(self, number): self.others = [] host = self.temp_xml_rpc_options.server port = self.temp_xml_rpc_options.port virtual_display = 5 script = '/server/vistrails/trunk/scripts/start_vistrails_xvfb.sh' for x in xrange(number): port += 1 virtual_display += 1 args = [script,":%s"%virtual_display,host,str(port),'0', '0'] try: p = subprocess.Popen(args) time.sleep(20) self.others.append("http://%s:%s"%(host,port)) except Exception, e: debug.critical("Couldn't start the instance on display: %s port: %s" % (virtual_display, port) debug.critical("Exception: %s" % str(e))
try: self.updateFunctionPort() except ModuleError, e: raise ModuleErrors([e, ModuleError(self, "Error inside map: " + e.msg)])
self.updateFunctionPort()
def compute(self): """The compute method for the Fold."""
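The added lines wrap updateFunctionPort so that a failure inside the mapped workflow is reported both as the original error and as a second error naming the map itself. A stand-alone sketch of that re-raise-with-context pattern; ModuleError and ModuleErrors are stand-ins for the VisTrails classes, defined minimally here so the snippet runs on its own.

    class ModuleError(Exception):
        def __init__(self, module, msg):
            Exception.__init__(self, msg)
            self.module = module
            self.msg = msg

    class ModuleErrors(Exception):
        def __init__(self, errors):
            Exception.__init__(self, "; ".join(e.msg for e in errors))
            self.errors = errors

    class Fold(object):
        def updateFunctionPort(self):
            raise ModuleError(self, "element 3 failed")        # simulated failure

        def compute(self):
            try:
                self.updateFunctionPort()
            except ModuleError as e:
                # Keep the original error and add one that points at the map.
                raise ModuleErrors(
                    [e, ModuleError(self, "Error inside map: " + e.msg)])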
'int' : core.modules.basic_modules.Integer, 'short' : core.modules.basic_modules.Integer, 'byte' : core.modules.basic_modules.Integer, 'nonNegativeInteger' : core.modules.basic_modules.Integer, 'long' : core.modules.basic_modules.Integer, 'unsignedLong' : core.modules.basic_modules.Integer,
def addPortsToMethods(w): """ Add input and output ports to the VisTrails complex type modules. """ reg = core.modules.module_registry.get_module_registry() dictkey = w + "|Methods" complexsdict = webServicesmodulesDict[dictkey] keys = complexsdict.keys() for dictkey in keys: obj = complexsdict[dictkey] objtype = reg.get_module_by_name(identifier = identifier, name = obj.name, namespace = obj.namespace) #Add input ports for port in obj.ports[0]: try: nameport = str(port.name) Type = str(port.type[1]) if isArray(Type): Type = 'Array' Type = wsdlTypesDict[Type] reg.add_input_port(objtype,nameport,(Type, '')) except KeyError: try: modname = str(port.type[1]) dictkey = w + "|Types" typedict = webServicesmodulesDict[dictkey] dictkey = w + "." + modname typeObj = typedict[dictkey] Type = reg.get_module_by_name(identifier=identifier, name=typeObj.name, namespace=typeObj.namespace) reg.add_input_port(objtype,str(port.name),(Type, '')) except KeyError: pass #Add output ports for port in obj.ports[1]: try: nameport = str(port.name) Type = str(port.type[1]) if isArray(Type): Type = 'Array' Type = wsdlTypesDict[Type] reg.add_output_port(objtype,nameport,(Type, '')) except KeyError: try: modname = str(port.type[1]) dictkey = w + "|Types" typedict = webServicesmodulesDict[dictkey] dictkey = w + "." + modname typeObj = typedict[dictkey] Type = reg.get_module_by_name(identifier=identifier, name=typeObj.name, namespace=typeObj.namespace) reg.add_output_port(objtype,str(port.name),(Type, '')) except KeyError: pass
'IDREFS': core.modules.basic_modules.String, 'ID': core.modules.basic_modules.String,
def addPortsToMethods(w): """ Add input and output ports to the VisTrails complex type modules. """ reg = core.modules.module_registry.get_module_registry() dictkey = w + "|Methods" complexsdict = webServicesmodulesDict[dictkey] keys = complexsdict.keys() for dictkey in keys: obj = complexsdict[dictkey] objtype = reg.get_module_by_name(identifier = identifier, name = obj.name, namespace = obj.namespace) #Add input ports for port in obj.ports[0]: try: nameport = str(port.name) Type = str(port.type[1]) if isArray(Type): Type = 'Array' Type = wsdlTypesDict[Type] reg.add_input_port(objtype,nameport,(Type, '')) except KeyError: try: modname = str(port.type[1]) dictkey = w + "|Types" typedict = webServicesmodulesDict[dictkey] dictkey = w + "." + modname typeObj = typedict[dictkey] Type = reg.get_module_by_name(identifier=identifier, name=typeObj.name, namespace=typeObj.namespace) reg.add_input_port(objtype,str(port.name),(Type, '')) except KeyError: pass #Add output ports for port in obj.ports[1]: try: nameport = str(port.name) Type = str(port.type[1]) if isArray(Type): Type = 'Array' Type = wsdlTypesDict[Type] reg.add_output_port(objtype,nameport,(Type, '')) except KeyError: try: modname = str(port.type[1]) dictkey = w + "|Types" typedict = webServicesmodulesDict[dictkey] dictkey = w + "." + modname typeObj = typedict[dictkey] Type = reg.get_module_by_name(identifier=identifier, name=typeObj.name, namespace=typeObj.namespace) reg.add_output_port(objtype,str(port.name),(Type, '')) except KeyError: pass
'DBAction': {'annotations': update_annotations}}
'DBAction': {'annotations': update_annotations}, 'DBParameter': {'type': update_type}, }
def update_workflow(old_obj, translate_dict): return DBWorkflow.update_version(old_obj.db_workflow, translate_dict)
module_version = module.version.split('.')
def find_descriptors(pipeline, module_ids=None): registry = get_module_registry() conf = get_vistrails_configuration() if module_ids == None: module_ids = pipeline.modules.iterkeys() exceptions = set() for mid in module_ids: module = pipeline.modules[mid] try: descriptor = registry.get_similar_descriptor( module.package, module.name, module.namespace, module.version, module.internal_version) pkg = registry.get_package_by_name(module.package) pkg_version = pkg.version.split('.') module_version = module.version.split('.') # FIXME: this split('.') should be a function somewhere. # The goal is to be able to compare them lexicographically
if pkg_version < module_version:
if versions_increasing(pkg_version, module_version):
def find_descriptors(pipeline, module_ids=None): registry = get_module_registry() conf = get_vistrails_configuration() if module_ids == None: module_ids = pipeline.modules.iterkeys() exceptions = set() for mid in module_ids: module = pipeline.modules[mid] try: descriptor = registry.get_similar_descriptor( module.package, module.name, module.namespace, module.version, module.internal_version) pkg = registry.get_package_by_name(module.package) pkg_version = pkg.version.split('.') module_version = module.version.split('.') # FIXME: this split('.') should be a function somewhere. # The goal is to be able to compare them lexicographically
if pkg_version > module_version:
elif versions_increasing(module_version, pkg_version):
def find_descriptors(pipeline, module_ids=None): registry = get_module_registry() conf = get_vistrails_configuration() if module_ids == None: module_ids = pipeline.modules.iterkeys() exceptions = set() for mid in module_ids: module = pipeline.modules[mid] try: descriptor = registry.get_similar_descriptor( module.package, module.name, module.namespace, module.version, module.internal_version) pkg = registry.get_package_by_name(module.package) pkg_version = pkg.version.split('.') module_version = module.version.split('.') # FIXME: this split('.') should be a function somewhere. # The goal is to be able to compare them lexicographically
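The FIXME in the context explains the problem: version strings are split on '.' and compared, but comparing the pieces as strings sorts '10' before '9'. The added lines call a versions_increasing helper instead; its implementation is not shown in these rows, so the following is only a plausible sketch of what it might do.

    def versions_increasing(v1, v2):
        # True when v1 is an earlier version than v2, comparing the dotted
        # components numerically rather than as strings.
        def as_ints(version):
            parts = version.split('.') if isinstance(version, str) else version
            return [int(p) for p in parts]
        return as_ints(v1) < as_ints(v2)

    # Lexicographic comparison gets '0.10' vs '0.9' backwards; numeric does not.
    assert ['0', '10'] < ['0', '9']                 # string compare: wrong order
    assert versions_increasing('0.9', '0.10')       # numeric compare: correct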
new_version = \
(new_version, new_pipeline) = \
def add_abstraction_to_registry(self, abs_vistrail, abs_fname, name, namespace=None, module_version=None, is_global=True, avail_fnames=[]): reg = core.modules.module_registry.get_module_registry() if namespace is None: namespace = \ abs_vistrail.get_annotation('__abstraction_uuid__').value
new_version)
new_version, new_pipeline)
def add_abstraction_to_registry(self, abs_vistrail, abs_fname, name, namespace=None, module_version=None, is_global=True, avail_fnames=[]): reg = core.modules.module_registry.get_module_registry() if namespace is None: namespace = \ abs_vistrail.get_annotation('__abstraction_uuid__').value
migrate_tags = get_vistrails_configuration().check("migrateTags")
should_migrate_tags = get_vistrails_configuration().check("migrateTags")
def flush_delayed_actions(self): start_version = self.current_version desc_key = Action.ANNOTATION_DESCRIPTION added_upgrade = False migrate_tags = get_vistrails_configuration().check("migrateTags") for action in self._delayed_actions: self.vistrail.add_action(action, start_version, self.current_session) # HACK to populate upgrade information if (action.has_annotation_with_key(desc_key) and action.get_annotation_by_key(desc_key).value == 'Upgrade'): self.vistrail.set_upgrade(start_version, str(action.id)) if migrate_tags: tag = self.vistrail.get_tag(start_version) if tag: self.vistrail.set_tag(start_version, "") self.vistrail.set_tag(action.id, tag) notes = self.vistrail.get_notes(start_version) if notes: self.vistrail.set_notes(start_version, "") self.vistrail.set_notes(action.id, notes) self.current_version = action.id start_version = action.id added_upgrade = True
if migrate_tags: tag = self.vistrail.get_tag(start_version) if tag: self.vistrail.set_tag(start_version, "") self.vistrail.set_tag(action.id, tag) notes = self.vistrail.get_notes(start_version) if notes: self.vistrail.set_notes(start_version, "") self.vistrail.set_notes(action.id, notes)
if should_migrate_tags: self.migrate_tags(start_version, action.id)
def flush_delayed_actions(self): start_version = self.current_version desc_key = Action.ANNOTATION_DESCRIPTION added_upgrade = False migrate_tags = get_vistrails_configuration().check("migrateTags") for action in self._delayed_actions: self.vistrail.add_action(action, start_version, self.current_session) # HACK to populate upgrade information if (action.has_annotation_with_key(desc_key) and action.get_annotation_by_key(desc_key).value == 'Upgrade'): self.vistrail.set_upgrade(start_version, str(action.id)) if migrate_tags: tag = self.vistrail.get_tag(start_version) if tag: self.vistrail.set_tag(start_version, "") self.vistrail.set_tag(action.id, tag) notes = self.vistrail.get_notes(start_version) if notes: self.vistrail.set_notes(start_version, "") self.vistrail.set_notes(action.id, notes) self.current_version = action.id start_version = action.id added_upgrade = True
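The rows above replace the inline tag-and-notes migration with a call to self.migrate_tags(start_version, action.id). Reconstructed from the inline block being removed, such a helper could look roughly like this (the actual method body is not shown, so treat it as a sketch):

    def migrate_tags(self, from_version, to_version):
        # Move any tag and notes from the pre-upgrade version to the new one.
        tag = self.vistrail.get_tag(from_version)
        if tag:
            self.vistrail.set_tag(from_version, "")
            self.vistrail.set_tag(to_version, tag)
        notes = self.vistrail.get_notes(from_version)
        if notes:
            self.vistrail.set_notes(from_version, "")
            self.vistrail.set_notes(to_version, notes)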
ops.append(('add', new_module))
def replace_generic(controller, pipeline, old_module, new_module, function_remap={}, src_port_remap={}, dst_port_remap={}, annotation_remap={}): ops = [] ops.extend(controller.delete_module_list_ops(pipeline, [old_module.id])) ops.append(('add', new_module)) for annotation in old_module.annotations: if annotation.key not in annotation_remap: annotation_key = annotation.key else: remap = annotation_remap[annotation.key] if remap is None: # don't add the annotation back in continue elif type(remap) != type(""): ops.extend(remap(annotation)) continue else: annotation_key = remap