rem (stringlengths 0-477k) | add (stringlengths 0-313k) | context (stringlengths 6-599k) |
---|---|---|
return ApplicationDowntimeService.getInstance().getDowntimeRecorder() .isApplicationUp(appConfig); | DowntimeRecorder recorder = ApplicationDowntimeService.getInstance().getDowntimeRecorder(); boolean isUp = true; if (appConfig.isCluster()) { for (ApplicationConfig childAppConfig : appConfig.getApplications()) { if (!recorder.isApplicationUp(childAppConfig)) { isUp = false; break; } } } else { isUp = recorder.isApplicationUp(appConfig); } return isUp; | public static boolean isApplicationUp(ApplicationConfig appConfig) { return ApplicationDowntimeService.getInstance().getDowntimeRecorder() .isApplicationUp(appConfig); } |
logger.logActivity(username, user.getName()+" failed to login"); | public void login(ServiceContext context, String username, String password) throws ServiceException{ LoginCallbackHandler callbackHandler = new LoginCallbackHandler(); callbackHandler.setUsername(username); callbackHandler.setPassword(password); User user = null; UserManager userManager = UserManager.getInstance(); try{ LoginContext loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); loginContext.login(); /* set Subject in session */ context._setSubject(loginContext.getSubject()); /* Successful login: update the lock count and status */ user = userManager.getUser(username); user.setLockCount(0); user.setStatus(null); userManager.updateUser(user); }catch(LoginException lex){ user = userManager.getUser(username); String errorCode = ErrorCodes.UNKNOWN_ERROR; Object[] values = null; /* Conditionalize the error message */ if(user == null){ errorCode = ErrorCodes.INVALID_CREDENTIALS; }else if("I".equals(user.getStatus())){ errorCode = ErrorCodes.ACCOUNT_LOCKED; }else if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED){ user.setStatus("I"); userManager.updateUser(user); errorCode = ErrorCodes.ACCOUNT_LOCKED; }else{ userManager.updateUser(user); errorCode = ErrorCodes.INVALID_LOGIN_ATTEMPTS; values = new Object[]{ String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt)}; } } throw new ServiceException(errorCode, values); } } |
|
d.addObjectCreate("architect-settings/print-user-settings", PrintUserSettings.class); d.addCallMethod("architect-settings/print-user-settings/setting", "putProperty", 2); d.addCallParam("architect-settings/print-user-settings/setting", 0, "name"); d.addCallParam("architect-settings/print-user-settings/setting", 1, "value"); d.addSetNext("architect-settings/print-user-settings", "setPrintUserSettings", "ca.sqlpower.architect.PrintUserSettings"); | protected Digester setupDigester() { Digester d = new Digester(); d.setValidating(false); d.push(this); d.addObjectCreate("architect-settings", UserSettings.class); // jdbc drivers d.addCallMethod("architect-settings/jdbc-jar-files/jar", "addDriverJarPath", 0); // db connections d.addObjectCreate("architect-settings/db-connections/dbcs", DBConnectionSpec.class); d.addSetProperties ("architect-settings/db-connections/dbcs", new String[] {"connection-name", "driver-class", "jdbc-url", "user-name", "user-pass", "sequence-number", "single-login"}, new String[] {"displayName", "driverClass", "url", "user", "pass", "seqNo", "singleLogin"}); d.addCallMethod("architect-settings/db-connections/dbcs", "setName", 0); // argument is element body text d.addSetNext("architect-settings/db-connections/dbcs", "addConnection", "ca.sqlpower.sql.DBConnectionSpec"); // gui settings d.addObjectCreate("architect-settings/swing-gui-settings", SwingUserSettings.class); d.addCallMethod("architect-settings/swing-gui-settings/setting", "putSetting", 3); d.addCallParam("architect-settings/swing-gui-settings/setting", 0, "name"); d.addCallParam("architect-settings/swing-gui-settings/setting", 1, "class"); d.addCallParam("architect-settings/swing-gui-settings/setting", 2, "value"); d.addSetNext("architect-settings/swing-gui-settings", "setSwingSettings", "ca.sqlpower.architect.swingui.SwingUserSettings"); // ETL settings d.addObjectCreate("architect-settings/etl-user-settings", ETLUserSettings.class); d.addCallMethod("architect-settings/etl-user-settings/setting", "putProperty", 2); d.addCallParam("architect-settings/etl-user-settings/setting", 0, "name"); d.addCallParam("architect-settings/etl-user-settings/setting", 1, "value"); d.addSetNext("architect-settings/etl-user-settings", "setETLUserSettings", "ca.sqlpower.architect.etl.ETLUserSettings"); // ETL settings d.addObjectCreate("architect-settings/ddl-user-settings", DDLUserSettings.class); d.addCallMethod("architect-settings/ddl-user-settings/setting", "putProperty", 2); d.addCallParam("architect-settings/ddl-user-settings/setting", 0, "name"); d.addCallParam("architect-settings/ddl-user-settings/setting", 1, "value"); d.addSetNext("architect-settings/ddl-user-settings", "setDDLUserSettings", "ca.sqlpower.architect.ddl.DDLUserSettings"); d.addSetNext("architect-settings", "setUserSettings", "ca.sqlpower.architect.UserSettings"); return d; } |
|
writePrintUserSettings(us.getPrintUserSettings()); | public void write(ArchitectSession session) throws ArchitectException { UserSettings us = session.getUserSettings(); try { out = new PrintWriter(new FileWriter(file)); indent = 0; println("<?xml version=\"1.0\"?>"); println("<architect-settings version=\"0.1\">"); indent++; // generate XML directly from settings writeDriverJarPaths(session.getDriverJarList()); writeDbConnections(us.getConnections()); writeSwingSettings(us.getSwingSettings()); writeETLUserSettings(us.getETLUserSettings()); writeDDLUserSettings(us.getDDLUserSettings()); indent--; println("</architect-settings>"); } catch (IOException e) { throw new ArchitectException("Couldn't save settings", e); } finally { if (out != null) { out.flush(); out.close(); } out = null; } } |
|
List l = new ArrayList(1); | l = new ArrayList(1); | public void doTag(XMLOutput output) throws MissingAttributeException, JellyTagException { if (var == null) { throw new MissingAttributeException( "var" ); } if (select == null) { throw new MissingAttributeException( "select" ); } Object xpathContext = getXPathContext(); Object value = null; try { if(single!=null && single.booleanValue()==true) { value = select.selectSingleNode(xpathContext); } else { value = select.evaluate(xpathContext); } } catch (JaxenException e) { throw new JellyTagException(e); } if (value instanceof List) { // sort the list if xpCmp is set. if (xpCmp != null && (xpCmp.getXpath() != null)) { Collections.sort((List)value, xpCmp); } } if (single!=null) { if (single.booleanValue()==true) { if(value instanceof List) { List l = (List) value; if (l.size()==0) value=null; else value=l.get(0); } } else { // single == false if(! (value instanceof List) ) { if (value==null) { l = new ArrayList(0); } else { List l = new ArrayList(1); l.add(value); } value = l; } } } //log.info( "Evaluated xpath: " + select + " as: " + value + " of type: " + value.getClass().getName() ); context.setVariable(var, value); } |
if(thisPair == null){ continue; } | Vector do4Gamete(){ Vector blocks = new Vector(); Vector strongPairs = new Vector(); //first make a list of marker pairs with < 4 gametes, sorted by distance apart for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ if (freqs[i] > fourGameteCutoff) numGam++; } //color in squares if(numGam > 3){ thisPair.setColor(Color.white); }else{ thisPair.setColor(Color.darkGray); } if (numGam > 3){ continue; } Vector addMe = new Vector(); //a vector of x, y, separation int sep = y - x - 1; //compute separation of two markers addMe.add(String.valueOf(x)); addMe.add(String.valueOf(y)); addMe.add(String.valueOf(sep)); if (strongPairs.size() == 0){ //put first pair first strongPairs.add(addMe); }else{ //sort by descending separation of markers in each pair boolean unplaced = true; for (int v = 0; v < strongPairs.size(); v ++){ if (sep >= Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2))){ strongPairs.insertElementAt(addMe, v); unplaced = false; break; } } if (unplaced) {strongPairs.add(addMe);} } } } //now take this list of pairs with 3 gametes and construct blocks boolean[] usedInBlock = new boolean[dPrime.length + 1]; for (int v = 0; v < strongPairs.size(); v++){ boolean isABlock = true; int first = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(0)); int last = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(1)); //first see if this block overlaps with another: if (usedInBlock[first] || usedInBlock[last]) continue; //test this block. for (int y = first+1; y <= last; y++){ //loop over columns in row y for (int x = first; x < y; x++){ PairwiseLinkage thisPair = dPrime[x][y]; double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ if (freqs[i] > fourGameteCutoff) numGam++; } if (numGam > 3){ isABlock = false; } } } if (isABlock){ //add to the block list, but in order by first marker number: if (blocks.size() == 0){ //put first block first blocks.add(first + " " + last); }else{ //sort by ascending separation of markers in each pair boolean placed = false; for (int b = 0; b < blocks.size(); b ++){ StringTokenizer st = new StringTokenizer((String)blocks.elementAt(b)); if (first < Integer.parseInt(st.nextToken())){ blocks.insertElementAt(first + " " + last, b); placed = true; break; } } //make sure to put in blocks which fall on the tail end if (!placed) blocks.add(first + " " + last); } for (int used = first; used <= last; used++){ usedInBlock[used] = true; } } } return stringVec2intVec(blocks); } |
|
if (thisPair == null){ continue; } | Vector doSFS(){ double cutHighCI = 0.98; double cutLowCI = 0.70; double mafThresh = 0.10; double[] cutLowCIVar = {0,0,0.80,0.50,0.50}; double[] maxDist = {0,0,20000,30000,1000000}; double recHighCI = 0.90; int numStrong = 0; int numRec = 0; int numInGroup = 0; Vector blocks = new Vector(); Vector strongPairs = new Vector(); //first set up a filter of markers which fail the MAF threshhold boolean[] skipMarker = new boolean[dPrime.length]; for (int x = 0; x < dPrime.length; x++){ if (((SNP)markerInfo.elementAt(x)).getMAF() < mafThresh){ skipMarker[x]=true; }else{ skipMarker[x]=false; } } //next make a list of marker pairs in "strong LD", sorted by distance apart for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); //color in squares if (lowCI > cutLowCI && highCI >= cutHighCI) { thisPair.setColor(new Color(224, 0, 0)); //strong LD }else if (highCI > recHighCI) { thisPair.setColor(new Color(192, 192, 240)); //uninformative } else { thisPair.setColor(Color.white); //recomb } if (skipMarker[x] || skipMarker[y]) continue; if (lod < -90) continue; //missing data if (highCI < cutHighCI || lowCI < cutLowCI) continue; //must pass "strong LD" test Vector addMe = new Vector(); //a vector of x, y, separation long sep; //compute actual separation sep = ((SNP)markerInfo.elementAt(y)).getPosition() - ((SNP)markerInfo.elementAt(x)).getPosition(); addMe.add(String.valueOf(x)); addMe.add(String.valueOf(y)); addMe.add(String.valueOf(sep)); if (strongPairs.size() == 0){ //put first pair first strongPairs.add(addMe); }else{ //sort by descending separation of markers in each pair boolean unplaced = true; for (int v = 0; v < strongPairs.size(); v ++){ if (sep >= Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2))){ strongPairs.insertElementAt(addMe, v); unplaced = false; break; } } if (unplaced){strongPairs.add(addMe);} } } } //now take this list of pairs with "strong LD" and construct blocks boolean[] usedInBlock = new boolean[dPrime.length + 1]; Vector thisBlock; int[] blockArray; for (int v = 0; v < strongPairs.size(); v++){ numStrong = 0; numRec = 0; numInGroup = 0; thisBlock = new Vector(); int first = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(0)); int last = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(1)); int sep = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2)); //first see if this block overlaps with another: if (usedInBlock[first] || usedInBlock[last]) continue; //next, count the number of markers in the block. for (int x = first; x <=last ; x++){ if(!skipMarker[x]) numInGroup++; } //skip it if it is too long in bases for it's size in markers if (numInGroup < 4 && sep > maxDist[numInGroup]) continue; thisBlock.add(new Integer(first)); //test this block. requires 95% of informative markers to be "strong" for (int y = first+1; y <= last; y++){ if (skipMarker[y]) continue; thisBlock.add(new Integer(y)); //loop over columns in row y for (int x = first; x < y; x++){ if (skipMarker[x]) continue; PairwiseLinkage thisPair = dPrime[x][y]; //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); if (lod < -90) continue; //monomorphic marker error if (lod == 0 && lowCI == 0 && highCI == 0) continue; //skip bad markers //for small blocks use different CI cutoffs if (numInGroup < 5){ if (lowCI > cutLowCIVar[numInGroup] && highCI >= cutHighCI) numStrong++; }else{ if (lowCI > cutLowCI && highCI >= cutHighCI) numStrong++; //strong LD } if (highCI < recHighCI) numRec++; //recombination } } //change the definition somewhat for small blocks if (numInGroup > 3){ if (numStrong + numRec < 6) continue; }else if (numInGroup > 2){ if (numStrong + numRec < 3) continue; }else{ if (numStrong + numRec < 1) continue; } blockArray = new int[thisBlock.size()]; for (int z = 0; z < thisBlock.size(); z++){ blockArray[z] = ((Integer)thisBlock.elementAt(z)).intValue(); } // System.out.println(first + " " + last + " " + numStrong + " " + numRec); if ((double)numStrong/(double)(numStrong + numRec) > 0.95){ //this qualifies as a block //add to the block list, but in order by first marker number: if (blocks.size() == 0){ //put first block first blocks.add(blockArray); }else{ //sort by ascending separation of markers in each pair boolean placed = false; for (int b = 0; b < blocks.size(); b ++){ if (first < ((int[])blocks.elementAt(b))[0]){ blocks.insertElementAt(blockArray, b); placed = true; break; } } //make sure to put in blocks which fall on the tail end if (!placed) blocks.add(blockArray); } for (int used = first; used <= last; used++){ usedInBlock[used] = true; } } } return blocks; } |
|
|| c[i] instanceof SQLRelationship) { | || (c[i] instanceof SQLRelationship && (((SQLTable.Folder) e.getSource()).getType() == SQLTable.Folder.EXPORTED_KEYS))) { | public void dbChildrenInserted(SQLObjectEvent e) { logger.debug("SQLObject children got inserted: "+e); boolean fireEvent = false; SQLObject[] c = e.getChildren(); for (int i = 0; i < c.length; i++) { try { addHierarcyListeners(c[i]); } catch (ArchitectException ex) { logger.error("Couldn't listen to added object", ex); } if (c[i] instanceof SQLTable || c[i] instanceof SQLRelationship) { fireEvent = true; PlayPenComponent ppc = removedComponents.get(c[i]); if (ppc != null) { contentPane.add(ppc, contentPane.getComponentCount()); } } } if (fireEvent) { firePropertyChange("model.children", null, null); revalidate(); } } |
mi = new JMenuItem("Show PlayPen Components"); mi.setActionCommand(ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN); mi.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { StringBuffer componentList = new StringBuffer(); for (int i = 0; i < contentPane.getComponentCount(); i++) { PlayPenComponent c = contentPane.getComponent(i); componentList.append(c).append("["+c.getModel()+"]\n"); } JOptionPane.showMessageDialog(PlayPen.this, new JScrollPane(new JTextArea(componentList.toString()))); } }); playPenPopup.add(mi); | protected void setupPlayPenPopup() { ArchitectFrame af = ArchitectFrame.getMainInstance(); playPenPopup = new JPopupMenu(); JMenuItem mi = new JMenuItem(); mi.setAction(chooseDBCSAction); playPenPopup.add(mi); mi = new JMenuItem(); mi.setAction(af.createTableAction); playPenPopup.add(mi); if (logger.isDebugEnabled()) { playPenPopup.addSeparator(); mi = new JMenuItem("Show Relationships"); mi.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { JOptionPane.showMessageDialog(PlayPen.this, new JScrollPane(new JList(new java.util.Vector(getRelationships())))); } }); playPenPopup.add(mi); } } |
|
public void actionPerformed(ActionEvent e) { showDbcsDialog(); } | public void actionPerformed(ActionEvent evt) { StringBuffer componentList = new StringBuffer(); for (int i = 0; i < contentPane.getComponentCount(); i++) { PlayPenComponent c = contentPane.getComponent(i); componentList.append(c).append("["+c.getModel()+"]\n"); } JOptionPane.showMessageDialog(PlayPen.this, new JScrollPane(new JTextArea(componentList.toString()))); } | public void actionPerformed(ActionEvent e) { showDbcsDialog(); } |
table.setValueAt(new Boolean(true),j,CAPTURE_COL); | public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command.equals("Run Tagger")) { try{ double rsqCut = Double.parseDouble(rsqField.getText()); if (rsqCut > 1){ Options.setTaggerRsqCutoff(1.0); rsqField.setText("1.0"); }else if (rsqCut < 0){ Options.setTaggerRsqCutoff(0.0); rsqField.setText("0.0"); }else{ Options.setTaggerRsqCutoff(rsqCut); } double lodCut = Double.parseDouble(lodField.getText()); if (lodCut < 0){ Options.setTaggerLODCutoff(0.0); lodField.setText("0.0"); }else{ Options.setTaggerLODCutoff(lodCut); } int maxNumTags; if (maxNumTagsField.getText().equals("")){ maxNumTags = 0; }else{ maxNumTags = Integer.parseInt(maxNumTagsField.getText()); } //build include/exclude lists Vector include = new Vector(); Vector exclude = new Vector(); Vector capture = new Vector(); for(int i= 0;i <table.getRowCount(); i++) { if(((Boolean)table.getValueAt(i,INCLUDE_COL)).booleanValue()) { include.add((String)table.getValueAt(i,NAME_COL)); }else if(((Boolean)table.getValueAt(i,EXCLUDE_COL)).booleanValue()) { exclude.add((String)table.getValueAt(i,NAME_COL)); } if (((Boolean)table.getValueAt(i,CAPTURE_COL)).booleanValue()){ capture.add(snpsByName.get(table.getValueAt(i,NAME_COL))); } } tagControl = new TaggerController(theData,include,exclude,capture, Integer.valueOf(aggressiveGroup.getSelection().getActionCommand()).intValue(),maxNumTags,true); runTaggerButton.setEnabled(false); taggerProgress.setIndeterminate(true); taggerProgress.setForeground(new Color(40,40,255)); taggerProgress.setMaximumSize(new Dimension(250,20)); taggerProgressPanel.setLayout(new BoxLayout(taggerProgressPanel,BoxLayout.Y_AXIS)); taggerProgressPanel.add(taggerProgressLabel); taggerProgressLabel.setAlignmentX(CENTER_ALIGNMENT); taggerProgressPanel.add(new JLabel(" ")); taggerProgressPanel.add(taggerProgress); remove(buttonPanel); add(taggerProgressPanel); revalidate(); tagControl.runTagger(); final TaggerConfigPanel tcp = this; timer = new Timer(100, new ActionListener(){ public void actionPerformed(ActionEvent e) { if(tagControl.isTaggingCompleted()) { remove(taggerProgressPanel); add(buttonPanel); runTaggerButton.setEnabled(true); //the parent of this is the jtabbedPane in the tagger tab of HV ((JTabbedPane)(tcp.getParent())).setSelectedIndex(1); fireTaggerEvent(new ActionEvent(tcp,ActionEvent.ACTION_PERFORMED,"taggingdone")); timer.stop(); } } }); timer.start(); }catch (TaggerException t){ JOptionPane.showMessageDialog(this, t.getMessage(), "Tagger", JOptionPane.ERROR_MESSAGE); } }else if (command.equals("Reset Table")){ for (int i = 0; i < table.getRowCount(); i++){ table.setValueAt(new Boolean(false), i, EXCLUDE_COL); table.setValueAt(new Boolean(false), i, INCLUDE_COL); table.setValueAt(new Boolean(true), i, CAPTURE_COL); } rsqField.setText(String.valueOf(Tagger.DEFAULT_RSQ_CUTOFF)); }else if (command.equals("Load Include File")){ Hashtable forceIncludes = new Hashtable(1,1); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION){ try{ BufferedReader br = new BufferedReader(new FileReader(fc.getSelectedFile())); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ line = line.trim(); forceIncludes.put(line,"I"); } } }catch(IOException ioe){ //throw new IOException("An error occured while reading the force includes file."); } } for (int i = 0; i < table.getRowCount(); i++){ if (forceIncludes.containsKey(table.getValueAt(i,NAME_COL))){ table.setValueAt(new Boolean(true),i,INCLUDE_COL); } } }else if (command.equals("Load Exclude File")){ Hashtable forceExcludes = new Hashtable(1,1); fc.setSelectedFile(new File("")); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION){ try{ BufferedReader br = new BufferedReader(new FileReader(fc.getSelectedFile())); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ line = line.trim(); forceExcludes.put(line,"E"); } } }catch(IOException ioe){ //throw new IOException("An error occured while reading the force excludes file."); } } for (int j = 0; j < table.getRowCount(); j++){ if (forceExcludes.containsKey(table.getValueAt(j,NAME_COL))){ table.setValueAt(new Boolean(true),j,EXCLUDE_COL); } } } } |
|
getXMLReader().parse( new InputSource( new StringReader( text ) ) ); | XMLReader xmlReader = getXMLReader(); xmlReader.setContentHandler(getJellyParser()); xmlReader.parse( new InputSource( new StringReader( text ) ) ); | protected void parseText(String text) throws JellyTagException { if ( log.isDebugEnabled() ) { log.debug( "About to parse: " + text ); } try { getXMLReader().parse( new InputSource( new StringReader( text ) ) ); } catch (Exception e) { throw new JellyTagException(e); } } |
public void run(Context context, XMLOutput output) throws Exception { | public void run(JellyContext context, XMLOutput output) throws Exception { | public void run(Context context, XMLOutput output) throws Exception { value = false; if ( test != null ) { if ( test.evaluateAsBoolean( context ) ) { value = true; getBody().run( context, output ); } } } |
public Folder() { children = new ArrayList(); | public Folder(int type, boolean populated) { this.populated = populated; this.type = type; this.children = new ArrayList(); if (type == COLUMNS) { name = "Columns"; } else if (type == IMPORTED_KEYS) { name = "Imported Keys"; } else if (type == EXPORTED_KEYS) { name = "Exported Keys"; } else { throw new IllegalArgumentException("Unknown folder type: "+type); } | public Folder() { children = new ArrayList(); } |
return true; | return populated; | public boolean isPopulated() { return true; } |
public void populate() { | public void populate() throws ArchitectException { try { if (type == COLUMNS) { parent.populateColumns(); } else if (type == IMPORTED_KEYS) { parent.populateRelationships(); } else if (type == EXPORTED_KEYS) { } else { throw new IllegalArgumentException("Unknown folder type: "+type); } } finally { populated = true; } | public void populate() { } |
protected void setParent(SQLObject newParent) { parent = newParent; | protected void setParent(SQLObject newParentTable) { parent = (SQLTable) newParentTable; | protected void setParent(SQLObject newParent) { parent = newParent; } |
this.columnsPopulated = false; this.relationshipsPopulated = false; | public SQLTable(SQLObject parent, String name, String remarks, String objectType) { logger.debug("NEW TABLE "+name+"@"+hashCode()); this.parent = parent; this.tableName = name; this.remarks = remarks; this.columnsPopulated = false; this.relationshipsPopulated = false; this.objectType = objectType; this.children = new ArrayList(); initFolders(); importedKeysFolder.addSQLObjectListener(this); } |
|
initFolders(); | initFolders(false); | public SQLTable(SQLObject parent, String name, String remarks, String objectType) { logger.debug("NEW TABLE "+name+"@"+hashCode()); this.parent = parent; this.tableName = name; this.remarks = remarks; this.columnsPopulated = false; this.relationshipsPopulated = false; this.objectType = objectType; this.children = new ArrayList(); initFolders(); importedKeysFolder.addSQLObjectListener(this); } |
t.columnsPopulated = true; t.relationshipsPopulated = true; | t.columnsFolder.populated = true; t.importedKeysFolder.populated = true; t.exportedKeysFolder.populated = true; | public static SQLTable getDerivedInstance(SQLTable source, SQLDatabase parent) throws ArchitectException { source.populate(); SQLTable t = new SQLTable(parent); t.columnsPopulated = true; t.relationshipsPopulated = true; t.tableName = source.tableName; t.remarks = source.remarks; t.primaryKeyName = source.getName()+"_pk"; t.inherit(source); parent.addChild(t); return t; } |
public void initFolders() { addChild(new Folder("Columns")); addChild(new Folder("Exported Keys")); addChild(new Folder("Imported Keys")); | public void initFolders(boolean populated) { addChild(new Folder(Folder.COLUMNS, populated)); addChild(new Folder(Folder.EXPORTED_KEYS, populated)); addChild(new Folder(Folder.IMPORTED_KEYS, populated)); | public void initFolders() { addChild(new Folder("Columns")); addChild(new Folder("Exported Keys")); addChild(new Folder("Imported Keys")); } |
return this.columnsPopulated; | return columnsFolder.isPopulated(); | public boolean isColumnsPopulated() { return this.columnsPopulated; } |
return columnsPopulated && relationshipsPopulated; | if (columnsFolder == null || importedKeysFolder == null || exportedKeysFolder == null) { return false; } else { return columnsFolder.isPopulated() && importedKeysFolder.isPopulated() && exportedKeysFolder.isPopulated(); } | public boolean isPopulated() { return columnsPopulated && relationshipsPopulated; } |
return this.relationshipsPopulated; | return importedKeysFolder.isPopulated() && exportedKeysFolder.isPopulated(); | public boolean isRelationshipsPopulated() { return this.relationshipsPopulated; } |
if (columnsPopulated) return; | if (columnsFolder.isPopulated()) return; | protected synchronized void populateColumns() throws ArchitectException { if (columnsPopulated) return; if (columnsFolder.children.size() > 0) throw new IllegalStateException("Can't populate table because it already contains columns"); try { SQLColumn.addColumnsToTable(this, getCatalogName(), getSchemaName(), tableName); columnsPopulated = true; } catch (SQLException e) { throw new ArchitectException("table.populate", e); } finally { columnsPopulated = true; Collections.sort(columnsFolder.children, new SQLColumn.SortByPKSeq()); normalizePrimaryKey(); int newSize = columnsFolder.children.size(); int[] changedIndices = new int[newSize]; for (int i = 0; i < newSize; i++) { changedIndices[i] = i; } columnsFolder.fireDbChildrenInserted(changedIndices, columnsFolder.children); } } |
columnsPopulated = true; | protected synchronized void populateColumns() throws ArchitectException { if (columnsPopulated) return; if (columnsFolder.children.size() > 0) throw new IllegalStateException("Can't populate table because it already contains columns"); try { SQLColumn.addColumnsToTable(this, getCatalogName(), getSchemaName(), tableName); columnsPopulated = true; } catch (SQLException e) { throw new ArchitectException("table.populate", e); } finally { columnsPopulated = true; Collections.sort(columnsFolder.children, new SQLColumn.SortByPKSeq()); normalizePrimaryKey(); int newSize = columnsFolder.children.size(); int[] changedIndices = new int[newSize]; for (int i = 0; i < newSize; i++) { changedIndices[i] = i; } columnsFolder.fireDbChildrenInserted(changedIndices, columnsFolder.children); } } |
|
throw new ArchitectException("table.populate", e); | throw new ArchitectException("Failed to populate columns of table "+getName(), e); | protected synchronized void populateColumns() throws ArchitectException { if (columnsPopulated) return; if (columnsFolder.children.size() > 0) throw new IllegalStateException("Can't populate table because it already contains columns"); try { SQLColumn.addColumnsToTable(this, getCatalogName(), getSchemaName(), tableName); columnsPopulated = true; } catch (SQLException e) { throw new ArchitectException("table.populate", e); } finally { columnsPopulated = true; Collections.sort(columnsFolder.children, new SQLColumn.SortByPKSeq()); normalizePrimaryKey(); int newSize = columnsFolder.children.size(); int[] changedIndices = new int[newSize]; for (int i = 0; i < newSize; i++) { changedIndices[i] = i; } columnsFolder.fireDbChildrenInserted(changedIndices, columnsFolder.children); } } |
columnsPopulated = true; | columnsFolder.populated = true; | protected synchronized void populateColumns() throws ArchitectException { if (columnsPopulated) return; if (columnsFolder.children.size() > 0) throw new IllegalStateException("Can't populate table because it already contains columns"); try { SQLColumn.addColumnsToTable(this, getCatalogName(), getSchemaName(), tableName); columnsPopulated = true; } catch (SQLException e) { throw new ArchitectException("table.populate", e); } finally { columnsPopulated = true; Collections.sort(columnsFolder.children, new SQLColumn.SortByPKSeq()); normalizePrimaryKey(); int newSize = columnsFolder.children.size(); int[] changedIndices = new int[newSize]; for (int i = 0; i < newSize; i++) { changedIndices[i] = i; } columnsFolder.fireDbChildrenInserted(changedIndices, columnsFolder.children); } } |
if (!columnsPopulated) throw new IllegalStateException("Table must be populated before relationships are added"); if (relationshipsPopulated) return; | if (!columnsFolder.isPopulated()) throw new IllegalStateException("Table must be populated before relationships are added"); if (importedKeysFolder.isPopulated()) return; | public synchronized void populateRelationships() throws ArchitectException { if (!columnsPopulated) throw new IllegalStateException("Table must be populated before relationships are added"); if (relationshipsPopulated) return; int oldSize = importedKeysFolder.children.size(); try { SQLRelationship.addRelationshipsToTable(this); relationshipsPopulated = true; } finally { relationshipsPopulated = true; int newSize = importedKeysFolder.children.size(); if (newSize > oldSize) { int[] changedIndices = new int[newSize - oldSize]; for (int i = 0, n = newSize - oldSize; i < n; i++) { changedIndices[i] = oldSize + i; } try { importedKeysFolder.fireDbChildrenInserted (changedIndices, importedKeysFolder.children.subList(oldSize, newSize)); } catch (IndexOutOfBoundsException ex) { logger.error("Index out of bounds while adding imported keys to table " +getName()+" where oldSize="+oldSize+"; newSize="+newSize +"; imported keys="+importedKeysFolder.children); } } } } |
relationshipsPopulated = true; | public synchronized void populateRelationships() throws ArchitectException { if (!columnsPopulated) throw new IllegalStateException("Table must be populated before relationships are added"); if (relationshipsPopulated) return; int oldSize = importedKeysFolder.children.size(); try { SQLRelationship.addRelationshipsToTable(this); relationshipsPopulated = true; } finally { relationshipsPopulated = true; int newSize = importedKeysFolder.children.size(); if (newSize > oldSize) { int[] changedIndices = new int[newSize - oldSize]; for (int i = 0, n = newSize - oldSize; i < n; i++) { changedIndices[i] = oldSize + i; } try { importedKeysFolder.fireDbChildrenInserted (changedIndices, importedKeysFolder.children.subList(oldSize, newSize)); } catch (IndexOutOfBoundsException ex) { logger.error("Index out of bounds while adding imported keys to table " +getName()+" where oldSize="+oldSize+"; newSize="+newSize +"; imported keys="+importedKeysFolder.children); } } } } |
|
relationshipsPopulated = true; | importedKeysFolder.populated = true; | public synchronized void populateRelationships() throws ArchitectException { if (!columnsPopulated) throw new IllegalStateException("Table must be populated before relationships are added"); if (relationshipsPopulated) return; int oldSize = importedKeysFolder.children.size(); try { SQLRelationship.addRelationshipsToTable(this); relationshipsPopulated = true; } finally { relationshipsPopulated = true; int newSize = importedKeysFolder.children.size(); if (newSize > oldSize) { int[] changedIndices = new int[newSize - oldSize]; for (int i = 0, n = newSize - oldSize; i < n; i++) { changedIndices[i] = oldSize + i; } try { importedKeysFolder.fireDbChildrenInserted (changedIndices, importedKeysFolder.children.subList(oldSize, newSize)); } catch (IndexOutOfBoundsException ex) { logger.error("Index out of bounds while adding imported keys to table " +getName()+" where oldSize="+oldSize+"; newSize="+newSize +"; imported keys="+importedKeysFolder.children); } } } } |
txw.abort(); | txw.commit(); | protected void createThumbnail( Volume volume ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null ) { // If there are no instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.abort(); return; } // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_BILINEAR ); BufferedImage thumbImage = atOp.filter( origImage, null ); // Save it try { ImageIO.write( thumbImage, "jpg", thumbnailFile ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this perdsisten object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); txw.commit(); } |
System.err.println( e.getMessage() ); | log.warn( e.getMessage() ); | public static PhotovaultDatabases loadDatabases( File f ) { // Now try to read the info BeanReader beanReader = new BeanReader(); beanReader.getXMLIntrospector().getConfiguration().setAttributesForPrimitives(false); beanReader.getBindingConfiguration().setMapIDs(false); PhotovaultDatabases databases = null; try { beanReader.registerBeanClass( "databases", PhotovaultDatabases.class ); beanReader.registerBeanClass( "database", PVDatabase.class ); beanReader.registerBeanClass( "volume", Volume.class ); beanReader.registerBeanClass( "external-volume", ExternalVolume.class ); databases = (PhotovaultDatabases) beanReader.parse( f ); } catch ( Exception e ) { System.err.println( e.getMessage() ); } return databases; } |
} else if (type.equals("Long")) { propClass = Long.TYPE; } else if (type.equals("Float")) { propClass = Float.TYPE; } else if (type.equals("Double")) { propClass = Double.TYPE; } | } | public void doTag (XMLOutput output) throws MissingAttributeException, JellyTagException { // Check that this tag is used inside the body of // a DynaClass tag, so that it can access the // context of that tag DynaclassTag parentTag = (DynaclassTag) findAncestorWithClass( DynaclassTag.class ); if ( parentTag == null ) { throw new JellyTagException( "This tag must be enclosed inside a <dynaclass> tag" ); } // Check property name if (name == null) { throw new MissingAttributeException( "name" ); } // Lookup appropriate property class Class propClass = propertyClass; if (propClass == null) { // Check property type if (type == null) { throw new MissingAttributeException( "type" ); } if (type.equals("String")) { propClass = String.class; } else if (type.equals("Integer")) { propClass = Integer.TYPE; } else if (type.equals("Short")) { propClass = Short.TYPE; } else if (type.equals("Long")) { propClass = Long.TYPE; } else if (type.equals("Float")) { propClass = Float.TYPE; } else if (type.equals("Double")) { propClass = Double.TYPE; } else if (type.equals("Long")) { propClass = Long.TYPE; } if (propClass == null) { try { propClass = Class.forName(type); } catch (Exception e) { throw new JellyTagException ("Class " + type + " not found by Class.forName"); } } } // Create dynaproperty object with given name and type prop = new DynaProperty (name, propClass); // Add new property to dynaclass context parentTag.addDynaProperty(prop); } |
else if (type.equals("Float")) { propClass = Float.TYPE; } else if (type.equals("Double")) { propClass = Double.TYPE; } else if (type.equals("Long")) { propClass = Long.TYPE; } | public void doTag (XMLOutput output) throws MissingAttributeException, JellyTagException { // Check that this tag is used inside the body of // a DynaClass tag, so that it can access the // context of that tag DynaclassTag parentTag = (DynaclassTag) findAncestorWithClass( DynaclassTag.class ); if ( parentTag == null ) { throw new JellyTagException( "This tag must be enclosed inside a <dynaclass> tag" ); } // Check property name if (name == null) { throw new MissingAttributeException( "name" ); } // Lookup appropriate property class Class propClass = propertyClass; if (propClass == null) { // Check property type if (type == null) { throw new MissingAttributeException( "type" ); } if (type.equals("String")) { propClass = String.class; } else if (type.equals("Integer")) { propClass = Integer.TYPE; } else if (type.equals("Short")) { propClass = Short.TYPE; } else if (type.equals("Long")) { propClass = Long.TYPE; } else if (type.equals("Float")) { propClass = Float.TYPE; } else if (type.equals("Double")) { propClass = Double.TYPE; } else if (type.equals("Long")) { propClass = Long.TYPE; } if (propClass == null) { try { propClass = Class.forName(type); } catch (Exception e) { throw new JellyTagException ("Class " + type + " not found by Class.forName"); } } } // Create dynaproperty object with given name and type prop = new DynaProperty (name, propClass); // Add new property to dynaclass context parentTag.addDynaProperty(prop); } |
|
Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; | class SortingHelper implements Comparable{ String name; long pos; String extra; int orderInFile; public SortingHelper(String name, String extra, long pos, int order){ this.name = name; this.extra = extra; this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } | void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. //it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. //todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } } |
Vector sortHelpers = new Vector(); | void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. //it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. //todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } } |
|
pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); | sortHelpers.add(new SortingHelper((String)names.get(k),(String)extras.get(k),Long.parseLong((String)positions.get(k)),k)); | void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." 
+ numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. //it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } } |
if(pos[k] < pos[k-1]) { | if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { | (context: prepareMarkerInput, verbatim duplicate of the row above)
Arrays.sort(pos); | Collections.sort(sortHelpers); | (context: prepareMarkerInput, verbatim duplicate of the row above)
for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); | for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; | (context: prepareMarkerInput, verbatim duplicate of the row above)
byte[] tempGenotype = new byte[pos.length]; | byte[] tempGenotype = new byte[sortHelpers.size()]; | (context: prepareMarkerInput, verbatim duplicate of the row above)
for(int i =0;i<pos.length;i++){ | for(int i =0;i<sortHelpers.size();i++){ | (context: prepareMarkerInput, verbatim duplicate of the row above)
ResultSet rs = null; | public void doTag(XMLOutput output) throws Exception { if (!maxRowsSpecified) { Object obj = context.getVariable("org.apache.commons.jelly.sql.maxRows"); if (obj != null) { if (obj instanceof Integer) { maxRows = ((Integer) obj).intValue(); } else if (obj instanceof String) { try { maxRows = Integer.parseInt((String) obj); } catch (NumberFormatException nfe) { throw new JellyException( Resources.getMessage("SQL_MAXROWS_PARSE_ERROR", (String) obj), nfe); } } else { throw new JellyException(Resources.getMessage("SQL_MAXROWS_INVALID")); } } } Result result = null; String sqlStatement = null; log.debug( "About to lookup connection" ); try { conn = getConnection(); /* * Use the SQL statement specified by the sql attribute, if any, * otherwise use the body as the statement. */ if (sql != null) { sqlStatement = sql; } else { sqlStatement = getBodyText(); } if (sqlStatement == null || sqlStatement.trim().length() == 0) { throw new JellyException(Resources.getMessage("SQL_NO_STATEMENT")); } /* * We shouldn't have a negative startRow or illegal maxrows */ if ((startRow < 0) || (maxRows < -1)) { throw new JellyException(Resources.getMessage("PARAM_BAD_VALUE")); } /* * Note! We must not use the setMaxRows() method on the * the statement to limit the number of rows, since the * Result factory must be able to figure out the correct * value for isLimitedByMaxRows(); there's no way to check * if it was from the ResultSet. */ if ( log.isDebugEnabled() ) { log.debug( "About to execute query: " + sqlStatement ); } ResultSet rs = null; if ( hasParameters() ) { PreparedStatement ps = conn.prepareStatement(sqlStatement); setParameters(ps); rs = ps.executeQuery(); } else { Statement statement = conn.createStatement(); rs = statement.executeQuery(sqlStatement); } result = new ResultImpl(rs, startRow, maxRows); context.setVariable(var, result); } catch (SQLException e) { throw new JellyException(sqlStatement + ": " + e.getMessage(), e); } finally { if (conn != null && !isPartOfTransaction) { try { conn.close(); } catch (SQLException e) { } // Not much we can do conn = null; } clearParameters(); } } |
|
Statement statement = conn.createStatement(); | statement = conn.createStatement(); | (context: doTag, verbatim duplicate of the row above)
if (statement != null) { try { statement.close(); } catch (SQLException e) { } } if (rs != null) { try { rs.close(); } catch (SQLException e) { } } | (context: doTag, verbatim duplicate of the row above)
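The three doTag rows above are one refactor: the Statement (and ResultSet) locals move out of the try block so a finally clause can close them even when the query throws. A minimal standalone sketch of that pattern; the class and method names here are illustrative, not the Jelly tag's actual members:

import java.sql.*;

class QueryRunner {
    static void run(Connection conn, String sql) throws SQLException {
        Statement statement = null;
        ResultSet rs = null;
        try {
            statement = conn.createStatement();
            rs = statement.executeQuery(sql);
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } finally {
            // Close in reverse order of creation, swallowing close-time
            // errors the same way the added finally block does.
            if (rs != null) {
                try { rs.close(); } catch (SQLException e) { }
            }
            if (statement != null) {
                try { statement.close(); } catch (SQLException e) { }
            }
        }
    }
}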
|
edittingDB = (SQLDatabase) so; | public void actionPerformed(ActionEvent e) { TreePath p = getSelectionPath(); if (p == null) { return; } SQLObject so = (SQLObject) p.getLastPathComponent(); if (so instanceof SQLDatabase) { DBConnectionSpec dbcs = ((SQLDatabase) so).getConnectionSpec(); logger.debug("Setting existing DBCS on panel: "+dbcs); dbcsPanel.setDbcs(dbcs); propDialog.setVisible(true); propDialog.requestFocus(); } else if (so instanceof SQLCatalog) { } else if (so instanceof SQLSchema) { } else if (so instanceof SQLTable) { } else if (so instanceof SQLColumn) { } } |
|
return TagScript.newInstance(tag); | return new AntTagScript(tag); | public TagScript createTagScript(String name, Attributes attributes) throws Exception { Project project = getProject(); // custom Ant tags if ( name.equals("fileScanner") ) { Tag tag = new FileScannerTag(new FileScanner(project)); return TagScript.newInstance(tag); } // is it an Ant task? Class type = (Class) project.getTaskDefinitions().get(name); if ( type != null ) { TaskTag tag = new TaskTag( project, type, name ); tag.setTrim( true ); if ( name.equals( "echo" ) ) { tag.setTrim(false); } return TagScript.newInstance(tag); } /* // an Ant DataType? DataType dataType = null; type = (Class) project.getDataTypeDefinitions().get(name); if ( type != null ) { try { java.lang.reflect.Constructor ctor = null; boolean noArg = false; // DataType can have a "no arg" constructor or take a single // Project argument. try { ctor = type.getConstructor(new Class[0]); noArg = true; } catch (NoSuchMethodException nse) { ctor = type.getConstructor(new Class[] { Project.class }); noArg = false; } if (noArg) { dataType = (DataType) ctor.newInstance(new Object[0]); } else { dataType = (DataType) ctor.newInstance(new Object[] {project}); } dataType.setProject( project ); } catch (Throwable t) { t.printStackTrace(); // ignore } } if ( dataType != null ) { DataTypeTag tag = new DataTypeTag( name, dataType ); tag.setAntProject( getProject() ); tag.getDynaBean().set( "project", project ); return TagScript.newInstance(tag); } */ // Since ant resolves so many dynamically loaded/created // things at run-time, we can make virtually no assumptions // as to what this tag might be. Tag tag = new OtherAntTag( project, name ); return TagScript.newInstance( tag ); } |
Tag tag = new OtherAntTag( project, | OtherAntTag tag = new OtherAntTag( project, | public TagScript createTagScript(String name, Attributes attributes) throws Exception { Project project = getProject(); // custom Ant tags if ( name.equals("fileScanner") ) { Tag tag = new FileScannerTag(new FileScanner(project)); return TagScript.newInstance(tag); } // is it an Ant task? Class type = (Class) project.getTaskDefinitions().get(name); if ( type != null ) { TaskTag tag = new TaskTag( project, type, name ); tag.setTrim( true ); if ( name.equals( "echo" ) ) { tag.setTrim(false); } return TagScript.newInstance(tag); } /* // an Ant DataType? DataType dataType = null; type = (Class) project.getDataTypeDefinitions().get(name); if ( type != null ) { try { java.lang.reflect.Constructor ctor = null; boolean noArg = false; // DataType can have a "no arg" constructor or take a single // Project argument. try { ctor = type.getConstructor(new Class[0]); noArg = true; } catch (NoSuchMethodException nse) { ctor = type.getConstructor(new Class[] { Project.class }); noArg = false; } if (noArg) { dataType = (DataType) ctor.newInstance(new Object[0]); } else { dataType = (DataType) ctor.newInstance(new Object[] {project}); } dataType.setProject( project ); } catch (Throwable t) { t.printStackTrace(); // ignore } } if ( dataType != null ) { DataTypeTag tag = new DataTypeTag( name, dataType ); tag.setAntProject( getProject() ); tag.getDynaBean().set( "project", project ); return TagScript.newInstance(tag); } */ // Since ant resolves so many dynamically loaded/created // things at run-time, we can make virtually no assumptions // as to what this tag might be. Tag tag = new OtherAntTag( project, name ); return TagScript.newInstance( tag ); } |
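
Both hunks move away from the generic `TagScript.newInstance(tag)` factory toward Ant-specific handling: the first returns a dedicated `AntTagScript`, and the second declares the variable with the concrete `OtherAntTag` type instead of the `Tag` interface. Declaring the concrete type matters when later code must call a method the interface does not expose, as in this illustration (all types here are hypothetical stand-ins, kept minimal to show the point):

interface Tag {
    void doTag();
}

class OtherAntTag implements Tag {
    public void doTag() { }
    // Subclass-only configuration method, not declared on the Tag interface.
    public void setTaskName(String name) { }
}

class DeclaredTypeDemo {
    void configure() {
        // Declared as the interface: subclass-specific methods are unreachable.
        Tag generic = new OtherAntTag();
        // generic.setTaskName("echo");   // would not compile

        // Declared as the concrete class, as the hunk above does:
        OtherAntTag concrete = new OtherAntTag();
        concrete.setTaskName("echo");     // compiles
    }
}
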
errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login")); }else{ if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; | errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login")); }else if("I".equals(user.getStatus())){ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); }else if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED){ user.setStatus("I"); errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); }else{ | public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { LoginForm loginForm = (LoginForm) actionForm; LoginCallbackHandler callbackHandler = new LoginCallbackHandler(); callbackHandler.setUsername(loginForm.getUsername()); callbackHandler.setPassword(loginForm.getPassword()); // TODO: we should set this in startup or in startup script System.setProperty(AuthConstants.AUTH_CONFIG_SYS_PROPERTY, AuthConstants.AUTH_CONFIG_FILE_NAME); LoginContext loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); User user = null; UserManager userManager = UserManager.getInstance(); try{ loginContext.login(); }catch(LoginException lex){ ActionErrors errors = new ActionErrors(); user = userManager.getUser(loginForm.getUsername()); /* Conditionalize the error message */ if(user == null){ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login")); }else{ if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login.attempt.count", String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt))); user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) user.setStatus("I"); userManager.updateUser(user); }else{ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); } } request.setAttribute(Globals.ERROR_KEY, errors); return mapping.getInputForward(); } /* set Subject in session */ context.setSubject(loginContext.getSubject()); user = context.getUser(); if(user.getLockCount() > 0){ user.setLockCount(0); userManager.updateUser(user); } return mapping.findForward(Forwards.SUCCESS); } |
user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) user.setStatus("I"); userManager.updateUser(user); }else{ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); | public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { LoginForm loginForm = (LoginForm) actionForm; LoginCallbackHandler callbackHandler = new LoginCallbackHandler(); callbackHandler.setUsername(loginForm.getUsername()); callbackHandler.setPassword(loginForm.getPassword()); // TODO: we should set this in startup or in startup script System.setProperty(AuthConstants.AUTH_CONFIG_SYS_PROPERTY, AuthConstants.AUTH_CONFIG_FILE_NAME); LoginContext loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); User user = null; UserManager userManager = UserManager.getInstance(); try{ loginContext.login(); }catch(LoginException lex){ ActionErrors errors = new ActionErrors(); user = userManager.getUser(loginForm.getUsername()); /* Conditionalize the error message */ if(user == null){ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login")); }else{ if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login.attempt.count", String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt))); user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) user.setStatus("I"); userManager.updateUser(user); }else{ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); } } request.setAttribute(Globals.ERROR_KEY, errors); return mapping.getInputForward(); } /* set Subject in session */ context.setSubject(loginContext.getSubject()); user = context.getUser(); if(user.getLockCount() > 0){ user.setLockCount(0); userManager.updateUser(user); } return mapping.findForward(Forwards.SUCCESS); } |
|
userManager.updateUser(user); }else{ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("unknown.error")); | public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { LoginForm loginForm = (LoginForm) actionForm; LoginCallbackHandler callbackHandler = new LoginCallbackHandler(); callbackHandler.setUsername(loginForm.getUsername()); callbackHandler.setPassword(loginForm.getPassword()); // TODO: we should set this in startup or in startup script System.setProperty(AuthConstants.AUTH_CONFIG_SYS_PROPERTY, AuthConstants.AUTH_CONFIG_FILE_NAME); LoginContext loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); User user = null; UserManager userManager = UserManager.getInstance(); try{ loginContext.login(); }catch(LoginException lex){ ActionErrors errors = new ActionErrors(); user = userManager.getUser(loginForm.getUsername()); /* Conditionalize the error message */ if(user == null){ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login")); }else{ if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("invalid.login.attempt.count", String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt))); user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) user.setStatus("I"); userManager.updateUser(user); }else{ errors.add(ActionErrors.GLOBAL_ERROR, new ActionError("account.locked")); } } request.setAttribute(Globals.ERROR_KEY, errors); return mapping.getInputForward(); } /* set Subject in session */ context.setSubject(loginContext.getSubject()); user = context.getUser(); if(user.getLockCount() > 0){ user.setLockCount(0); userManager.updateUser(user); } return mapping.findForward(Forwards.SUCCESS); } |
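
Taken together, the three hunks above encode one account-lockout policy: report unknown users and already-locked accounts distinctly, count each failed attempt, warn with the number of attempts remaining, and set the status flag to "I" once the limit is reached. A condensed, self-contained sketch of that policy; the `User` class and method names below are stand-ins, not the application's real API:

class User {
    private int lockCount;
    private String status;
    int getLockCount() { return lockCount; }
    void setLockCount(int c) { lockCount = c; }
    String getStatus() { return status; }
    void setStatus(String s) { status = s; }
}

class LockoutPolicy {
    static final int MAX_LOGIN_ATTEMPTS_ALLOWED = 3;

    // Returns the message key to show for one failed login attempt.
    static String recordFailedLogin(User user) {
        if (user == null) {
            return "invalid.login";                       // unknown account
        }
        if ("I".equals(user.getStatus())) {
            return "account.locked";                      // already locked
        }
        int thisAttempt = user.getLockCount() + 1;
        user.setLockCount(thisAttempt);
        if (thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED) {
            user.setStatus("I");                          // lock on the final failure
            return "account.locked";
        }
        return "invalid.login.attempt.count";             // warn with attempts left
    }
}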
|
System.out.println("The following markers appear in both the include and exclude lists: " + sb.toString()); | System.out.println("Fatal error: The following markers appear in both the include and exclude lists: " + sb.toString()); System.exit(1); | private void argHandler(String[] args){ int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(pedFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ System.err.println("PC LOADLETTER?! What the fuck does that mean?!"); System.exit(31337); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("-excludeMarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { if (!quietMode) System.out.print("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) System.out.print(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) System.out.println(token+" "); excludedMarkers.add(new Integer(token)); } } if (!quietMode) System.out.println(); } catch(NumberFormatException nfe) { System.out.println("-excludeMarkers argument should be of the format: 1,3,5..8,12"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapsFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(infoFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapmapFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ System.out.println("-track requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ System.out.println("only one output argument is allowed"); System.exit(1); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one "+args[i-1] + " argument allowed"); System.exit(1); } try { maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(batchFileName != null){ System.out.println("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,"-hapthresh",0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,"-spacing",0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,"-minMAF",0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,"-minGeno",0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,"-hwcutoff",0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-maxMendel requires an integer argument"); System.exit(1); } else { try { maxMendel = Integer.parseInt(args[i]); if(maxMendel<0){ System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,"-missingCutoff",0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-permtests")) { i++; int permCount=0; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-permtests requires an integer argument"); System.exit(1); } else { try { permCount = Integer.parseInt(args[i]); if(permCount<0){ System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } doPermutationTest = true; permutationCount = permCount; } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-doTagging")) { doTagging = true; } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,"-tagrSqCutoff",0,1); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println(args[i-1] + " requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); 
System.exit(1); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println("-excludeTags requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); System.exit(1); } } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(batchFileName != null) { countOptions++; } if(countOptions > 1) { System.out.println("Only one genotype input file may be specified on the command line."); System.exit(1); } else if(countOptions == 0 && nogui) { System.out.println("You must specify a genotype input file."); System.exit(1); } //mess with vars, set defaults, etc if(skipCheck && !quietMode) { System.out.println("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = 500; }else{ if (!quietMode) System.out.println("Max LD comparison distance = " +maxDistance); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold((int)(hapThresh*100)); if (!quietMode) System.out.println("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; if (!quietMode) System.out.println("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); if (!quietMode) System.out.println("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; if (!quietMode) System.out.println("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; if (!quietMode) System.out.println("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); if (!quietMode) System.out.println("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); if (!quietMode) System.out.println("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); } else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if(doPermutationTest) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified for permutation tests to be performed."); System.exit(1); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified when using a custom association test file."); System.exit(1); } if(infoFileName == null) { System.out.println("A marker info file must be specified when using a custom association test file."); System.exit(1); } } if(doTagging) { if(infoFileName == null && hapmapFileName == null) { System.out.println("A marker info file must be specified when using -doTagging"); System.exit(1); } if(forceExcludeTags == null) { forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { 
System.out.println("-excludeTags and -excludeTagsFile cannot both be used"); System.exit(1); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -excludeTagsFile."); System.exit(1); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != null) { System.out.println("-includeTags and -includeTagsFile cannot both be used"); System.exit(1); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -includeTagsFile."); System.exit(1); } } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } System.out.println("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { System.out.println("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without -doTagging"); System.exit(1); } } |
Hashtable idsByName = new Hashtable(); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds has been ignored: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: 
OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = null; if( customAssocSet != null) { permTests = customAssocSet; }else { permTests = new AssociationTestSet(); permTests.cat(markerTestSet); permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getSavedEMs(),textData.getPedFile(),permTests); Thread permThread = new Thread(new Runnable() { public void run() { pts.doPermutations(); } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.currentThread().sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(doTagging) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); Hashtable idsByName = new Hashtable(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); idsByName.put(snp.getName(),new Integer(i)); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced included tags does not appear in the marker info file."); System.exit(1); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced excluded tags does not appear in the marker info file."); System.exit(1); } } if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, Tagger.AGGRESSIVE_TRIPLE); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
|
idsByName.put(snp.getName(),new Integer(i)); | } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds has been ignored: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = 
validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = null; if( customAssocSet != null) { permTests = customAssocSet; }else { permTests = new AssociationTestSet(); permTests.cat(markerTestSet); permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getSavedEMs(),textData.getPedFile(),permTests); Thread permThread = new Thread(new Runnable() { public void run() { pts.doPermutations(); } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.currentThread().sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(doTagging) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); Hashtable idsByName = new Hashtable(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); idsByName.put(snp.getName(),new Integer(i)); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced included tags does not appear in the marker info file."); System.exit(1); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced excluded tags does not appear in the marker info file."); System.exit(1); } } if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, Tagger.AGGRESSIVE_TRIPLE); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
forceExcludeTags.retainAll(filteredNames); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds has been ignored: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: 
OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = null; if( customAssocSet != null) { permTests = customAssocSet; }else { permTests = new AssociationTestSet(); permTests.cat(markerTestSet); permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getSavedEMs(),textData.getPedFile(),permTests); Thread permThread = new Thread(new Runnable() { public void run() { pts.doPermutations(); } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.currentThread().sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(doTagging) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); Hashtable idsByName = new Hashtable(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); idsByName.put(snp.getName(),new Integer(i)); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced included tags does not appear in the marker info file."); System.exit(1); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced excluded tags does not appear in the marker info file."); System.exit(1); } } if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, Tagger.AGGRESSIVE_TRIPLE); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
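
The add-side fragments of the last two hunks cooperate: after the marker list has been filtered, the code builds a set of the surviving marker names and calls `retainAll` so that force-excluded tags referring to filtered-out markers are dropped rather than reported as errors later. A small self-contained sketch of that pruning step (names are illustrative):

import java.util.HashSet;
import java.util.Vector;

class TagListFilter {
    // Collect the names of markers that survived filtering, then silently
    // drop force-excluded tags that no longer refer to a present marker.
    static void pruneExcludedTags(Vector forceExcludeTags, Vector filteredMarkerNames) {
        HashSet filteredNames = new HashSet();
        for (int i = 0; i < filteredMarkerNames.size(); i++) {
            filteredNames.add(filteredMarkerNames.elementAt(i));
        }
        forceExcludeTags.retainAll(filteredNames);
    }
}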
|
public Set<SQLColumn> generateColumnList(int num){ | public Set<SQLColumn> generateColumnList(int num) throws ArchitectException{ | public Set<SQLColumn> generateColumnList(int num){ Set<SQLColumn> colList = new TreeSet<SQLColumn>(comparator); for (int ii=1; ii <= num; ii++){ colList.add(new SQLColumn(new SQLTable(),"col"+ii,Types.INTEGER,3, 0)); } return colList; } |
public void testCompareColumn (){ | public void testCompareColumn () throws ArchitectException{ | public void testCompareColumn (){ Set<SQLColumn>list1 = generateColumnList(3); Set<SQLColumn>list2 = generateColumnList(3); assertEquals (0, colComparator.compareColumns(list1, list2)); list1.add(new SQLColumn()); assertEquals (1, colComparator.compareColumns(list1, list2)); assertEquals (-1, colComparator.compareColumns(list2, list1)); } |
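
The two test rows above compare generated TreeSet<SQLColumn> collections and expect 0/1/-1 results. The sketch below shows one plausible lexicographic comparison over sorted sets that satisfies those assertions; the element type and tie-breaking rules are assumptions for illustration, not SQL Power Architect's actual comparator.

import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;

public class SetComparison {
    // Compare two sorted sets elementwise: equal contents -> 0,
    // first set greater/longer -> 1, lesser/shorter -> -1.
    static <T extends Comparable<T>> int compareSets(SortedSet<T> a, SortedSet<T> b) {
        Iterator<T> ia = a.iterator(), ib = b.iterator();
        while (ia.hasNext() && ib.hasNext()) {
            int c = ia.next().compareTo(ib.next());
            if (c != 0) return c > 0 ? 1 : -1;
        }
        if (ia.hasNext()) return 1;   // a has extra elements
        if (ib.hasNext()) return -1;  // b has extra elements
        return 0;
    }

    public static void main(String[] args) {
        SortedSet<String> s1 = new TreeSet<>(), s2 = new TreeSet<>();
        for (int i = 1; i <= 3; i++) { s1.add("col" + i); s2.add("col" + i); }
        System.out.println(compareSets(s1, s2)); // 0
        s1.add("extra");
        System.out.println(compareSets(s1, s2)); // 1
        System.out.println(compareSets(s2, s1)); // -1
    }
}
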
return getChildren(); | return getTableDescendants(this); | public List getTables() throws ArchitectException { return getChildren(); } |
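
The getTables() fix above swaps a direct-children call for a recursive descendant walk, so tables nested under catalogs or schemas are still found. A minimal self-contained sketch of such a collector over a hypothetical node tree (SQLObject's real hierarchy is more involved):

import java.util.ArrayList;
import java.util.List;

public class TableWalk {
    // Hypothetical stand-in for a SQLObject hierarchy node.
    static class Node {
        final String name;
        final boolean isTable;
        final List<Node> children = new ArrayList<>();
        Node(String name, boolean isTable) { this.name = name; this.isTable = isTable; }
    }

    // Collect every table anywhere under 'root', not just immediate children.
    static List<Node> getTableDescendants(Node root) {
        List<Node> out = new ArrayList<>();
        for (Node child : root.children) {
            if (child.isTable) out.add(child);
            out.addAll(getTableDescendants(child)); // recurse into catalogs/schemas
        }
        return out;
    }

    public static void main(String[] args) {
        Node db = new Node("db", false);
        Node schema = new Node("schema", false);
        Node t1 = new Node("t1", true);
        db.children.add(schema);
        schema.children.add(t1);
        System.out.println(getTableDescendants(db).size()); // 1, even though t1 is nested
    }
}
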
tag.setContext(context); | try { tag.setContext(context); DynaTag dynaTag = (DynaTag) tag; for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); } | public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); } runTag(output); } |
DynaTag dynaTag = (DynaTag) tag; for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); | tag.doTag(output); } catch (JellyException e) { handleException(e); | public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); } runTag(output); } |
runTag(output); | catch (Exception e) { handleException(e); } | public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate(context); dynaTag.setAttribute(name, value); } runTag(output); } |
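
Taken together, the three run() rows above funnel attribute evaluation and tag execution through one exception handler. A compact sketch of that shape, with Expression, DynaTag and handleException as simplified stand-ins for the Jelly types:

import java.util.LinkedHashMap;
import java.util.Map;

public class TagRunSketch {
    interface Expression { Object evaluate(Object context); }
    interface DynaTag {
        void setAttribute(String name, Object value);
        void doTag() throws Exception;
    }

    // Evaluate each attribute expression against the context, push the value
    // onto the tag, then run it -- with every failure routed through a single
    // handler, mirroring the try/catch the rows above introduce.
    static void run(DynaTag tag, Map<String, Expression> attributes, Object context) {
        try {
            for (Map.Entry<String, Expression> entry : attributes.entrySet()) {
                tag.setAttribute(entry.getKey(), entry.getValue().evaluate(context));
            }
            tag.doTag();
        } catch (Exception e) {
            handleException(e); // hypothetical hook; Jelly wraps into JellyException
        }
    }

    static void handleException(Exception e) {
        System.err.println("tag failed: " + e);
    }

    public static void main(String[] args) {
        Map<String, Expression> attrs = new LinkedHashMap<String, Expression>();
        attrs.put("width", new Expression() {
            public Object evaluate(Object ctx) { return Integer.valueOf(42); }
        });
        run(new DynaTag() {
            public void setAttribute(String n, Object v) { System.out.println(n + "=" + v); }
            public void doTag() { throw new IllegalStateException("boom"); }
        }, attrs, new Object());
    }
}
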
try { Method method = taskType.getMethod("addText", addTaskParamTypes); if (method != null) { Object[] args = { text }; method.invoke(task, args); } | Method method = MethodUtils.getAccessibleMethod( taskType, "addText", addTaskParamTypes ); if (method != null) { Object[] args = { text }; method.invoke(task, args); | public void doTag(XMLOutput output) throws Exception { Task task = getTask(); String text = getBodyText(); // if the task has an addText() try { Method method = taskType.getMethod("addText", addTaskParamTypes); if (method != null) { Object[] args = { text }; method.invoke(task, args); } } catch (NoSuchMethodException e) { // this is hardly an exceptional case unfortunately // the JDK should just return null! } task.perform(); } |
catch (NoSuchMethodException e) { } | public void doTag(XMLOutput output) throws Exception { Task task = getTask(); String text = getBodyText(); // if the task has an addText() try { Method method = taskType.getMethod("addText", addTaskParamTypes); if (method != null) { Object[] args = { text }; method.invoke(task, args); } } catch (NoSuchMethodException e) { // this is hardly an exceptional case unfortunately // the JDK should just return null! } task.perform(); } |
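
The doTag() rows above replace Class.getMethod, which throws NoSuchMethodException when the method is absent, with commons-beanutils MethodUtils.getAccessibleMethod, which returns null instead, so the optional addText() call no longer needs a catch block. A minimal sketch, assuming commons-beanutils is on the classpath; EchoTask is a made-up example class:

import java.lang.reflect.Method;
import org.apache.commons.beanutils.MethodUtils;

public class AddTextSketch {
    // Invoke an optional addText(String) on a task-like object if it exists.
    // getAccessibleMethod returns null for a missing or inaccessible method,
    // which is exactly why the diff above drops the try/catch.
    static void addTextIfSupported(Object task, String text) throws Exception {
        Method m = MethodUtils.getAccessibleMethod(
                task.getClass(), "addText", new Class[] { String.class });
        if (m != null) {
            m.invoke(task, new Object[] { text });
        }
    }

    public static class EchoTask {
        public void addText(String s) { System.out.println("got: " + s); }
    }

    public static void main(String[] args) throws Exception {
        addTextIfSupported(new EchoTask(), "hello"); // prints got: hello
        addTextIfSupported(new Object(), "ignored"); // no addText -> silently skipped
    }
}
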
|
List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; | if (evt.getActionCommand().equals(ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { try { tp.getModel().removeColumn(colidx); } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { pp.db.removeChild(tp.getModel()); logger.debug("removing element from tableNames set: " + tp.getModel().getTableName()); logger.debug("before delete: " + Arrays.toString(pp.tableNames.toArray())); pp.tableNames.remove(tp.getModel().getTableName().toLowerCase()); logger.debug("after delete: " + Arrays.toString(pp.tableNames.toArray())); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
} Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { | } else if (evt.getActionCommand().equals(ArchitectSwingConstants.ACTION_COMMAND_SRC_DBTREE)) { TreePath [] selections = dbt.getSelectionPaths(); if (selections.length > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +selections.length+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = Arrays.asList(selections).iterator(); while (it.hasNext()) { TreePath tp = (TreePath) it.next(); SQLObject so = (SQLObject) tp.getLastPathComponent(); if (so instanceof SQLTable) { SQLTable st = (SQLTable) so; pp.db.removeChild(st); pp.tableNames.remove(st.getTableName().toLowerCase()); } else if (so instanceof SQLColumn) { | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
tp.getModel().removeColumn(colidx); | SQLColumn sc = (SQLColumn)so; SQLTable st = sc.getParentTable(); st.removeColumn(sc); | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); | JOptionPane.showMessageDialog(dbt, ex.getMessage()); | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
pp.db.removeChild(tp.getModel()); | JOptionPane.showMessageDialog(dbt, "The selected SQLObject type is not recognised: " + so.getClass().getName()); | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
} else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
|
} | } else { } | public void actionPerformed(ActionEvent evt) { List items = pp.getSelectedItems(); if (items.size() > 1) { int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete the " +items.size()+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); if (item instanceof TablePane) { TablePane tp = (TablePane) item; int colidx; if ( (colidx = tp.getSelectedColumnIndex()) >= 0) { // a column in the selected table try { tp.getModel().removeColumn(colidx); // FIXME: loop inside here to support multiple column deletion? } catch (LockedColumnException ex) { JOptionPane.showMessageDialog((JComponent) item, ex.getMessage()); } } else { // the whole table pp.db.removeChild(tp.getModel()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } } |
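
Both branches of the delete action above share the same guard: confirm once before deleting multiple selected items, then remove each item in turn. A stripped-down sketch of that confirm-then-iterate shape; the Runnable-per-item model is an illustrative simplification:

import java.util.List;
import javax.swing.JOptionPane;

public class ConfirmDelete {
    // Ask once when several items are selected; delete nothing on "No".
    static boolean confirmMultiDelete(int count) {
        if (count <= 1) return true;
        int decision = JOptionPane.showConfirmDialog(null,
                "Are you sure you want to delete the " + count + " selected items?",
                "Multiple Delete", JOptionPane.YES_NO_OPTION);
        return decision == JOptionPane.YES_OPTION;
    }

    static void deleteAll(List<Runnable> deleters) {
        if (!confirmMultiDelete(deleters.size())) return;
        for (Runnable d : deleters) d.run(); // each item knows how to remove itself
    }

    public static void main(String[] args) {
        List<Runnable> one = java.util.Collections.<Runnable>singletonList(
                new Runnable() { public void run() { System.out.println("deleted"); } });
        deleteAll(one); // single item: no dialog, deletes immediately
    }
}
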
throw new JellyTagException( "this tag must be nested within a <tr> tag" ); | throw new JellyTagException( "this tag must be nested within a <gridBagLayout> tag" ); | public void addChild(Component component, Object constraints) throws JellyTagException { GridBagLayoutTag tag = (GridBagLayoutTag) findAncestorWithClass( GridBagLayoutTag.class ); if (tag == null) { throw new JellyTagException( "this tag must be nested within a <tr> tag" ); } tag.addLayoutComponent(component, getConstraints()); } |
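
The corrected message above matches what addChild() actually enforces: the constraint tag must sit inside a <gridBagLayout> tag, located by walking up the tag parents. A self-contained sketch of that ancestor lookup (the Tag classes here are toy stand-ins for Jelly's):

public class AncestorLookup {
    static class Tag {
        Tag parent;
        Tag(Tag parent) { this.parent = parent; }
    }
    static class GridBagLayoutTag extends Tag {
        GridBagLayoutTag(Tag parent) { super(parent); }
    }

    // Walk up the tag tree until a tag of the wanted class is found,
    // like Jelly's findAncestorWithClass in the row above.
    static Tag findAncestorWithClass(Tag start, Class<?> wanted) {
        for (Tag t = start.parent; t != null; t = t.parent) {
            if (wanted.isInstance(t)) return t;
        }
        return null;
    }

    public static void main(String[] args) {
        Tag layout = new GridBagLayoutTag(null);
        Tag cell = new Tag(layout);
        if (findAncestorWithClass(cell, GridBagLayoutTag.class) == null) {
            throw new IllegalStateException(
                "this tag must be nested within a <gridBagLayout> tag");
        }
        System.out.println("found enclosing gridBagLayout");
    }
}
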
if (tag instanceof NamespaceAwareTag) { NamespaceAwareTag naTag = (NamespaceAwareTag) tag; naTag.setNamespaceContext(getNamespaceContext()); } | protected void configureTag(Tag tag) throws Exception { if (tag instanceof CompilableTag) { ((CompilableTag) tag).compile(); } Tag parentTag = null; if ( parent != null ) { parentTag = parent.getTag(); } tag.setParent( parentTag ); tag.setBody( tagBody ); } |
|
if ( namespacesMap != null ) { for ( Iterator iter = namespacesMap.keySet().iterator(); iter.hasNext(); ) { | if ( tagNamespacesMap != null ) { for ( Iterator iter = tagNamespacesMap.keySet().iterator(); iter.hasNext(); ) { | protected void endNamespacePrefixes(XMLOutput output) throws SAXException { if ( namespacesMap != null ) { for ( Iterator iter = namespacesMap.keySet().iterator(); iter.hasNext(); ) { String prefix = (String) iter.next(); output.endPrefixMapping(prefix); } } } |
if ( namespacesMap != null ) { for ( Iterator iter = namespacesMap.entrySet().iterator(); iter.hasNext(); ) { | if ( tagNamespacesMap != null ) { for ( Iterator iter = tagNamespacesMap.entrySet().iterator(); iter.hasNext(); ) { | protected void startNamespacePrefixes(XMLOutput output) throws SAXException { if ( namespacesMap != null ) { for ( Iterator iter = namespacesMap.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); String prefix = (String) entry.getKey(); String uri = (String) entry.getValue(); output.startPrefixMapping(prefix, uri); } } } |
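
The two rows above keep startPrefixMapping and endPrefixMapping calls paired over the same (renamed) tagNamespacesMap. A sketch of that pairing with a try/finally so the end events fire even if the body fails; Output is a simplified stand-in for Jelly's XMLOutput:

import java.util.LinkedHashMap;
import java.util.Map;

public class PrefixMappings {
    interface Output {
        void startPrefixMapping(String prefix, String uri);
        void endPrefixMapping(String prefix);
    }

    // Emit start events for every tag-scoped prefix, run the body, then emit
    // the matching end events -- the pairing the two rows above maintain.
    static void withPrefixes(Map<String, String> tagNamespacesMap,
                             Output out, Runnable body) {
        for (Map.Entry<String, String> e : tagNamespacesMap.entrySet()) {
            out.startPrefixMapping(e.getKey(), e.getValue());
        }
        try {
            body.run();
        } finally {
            for (String prefix : tagNamespacesMap.keySet()) {
                out.endPrefixMapping(prefix);
            }
        }
    }

    public static void main(String[] args) {
        Map<String, String> ns = new LinkedHashMap<String, String>();
        ns.put("j", "jelly:core");
        Output out = new Output() {
            public void startPrefixMapping(String p, String u) { System.out.println("start " + p + "=" + u); }
            public void endPrefixMapping(String p) { System.out.println("end " + p); }
        };
        withPrefixes(ns, out, new Runnable() { public void run() { System.out.println("body"); } });
    }
}
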
this.playPen.getDatabase().getConnectionSpec().setSeqNo(9999); | public SwingUIProject(String name) throws ArchitectException { this.name = name; this.playPen = new PlayPen(new SQLDatabase()); List initialDBList = new ArrayList(); initialDBList.add(playPen.getDatabase()); this.sourceDatabases = new DBTree(initialDBList); ddlGenerator = new GenericDDLGenerator(); plExport = new PLExport(); } |
|
return super.toString() + "[name=" + name + ";city=" + city + "]"; | return super.toString() + "[name=" + name + ";location=" + location + "]"; | public String toString() { return super.toString() + "[name=" + name + ";city=" + city + "]"; } |
if (cc == 0) return; | public void tallyCCInd(byte[] a, int cc){ //case = 2, control = 1 for int cc //but to make the array indexes easier to use, we set cc to zero if it //is passed in as 2. if (cc == 2) cc = 0; byte a1 = a[0]; byte a2 = a[1]; if (a1 >= 5 && a2 >= 5){ counts[cc][0]++; counts[cc][1]++; if (allele1 == 0){ allele1 = (byte)(a1 - 4); allele2 = (byte)(a2 - 4); } }else{ //seed the alleles as soon as they're found if (allele1 == 0){ allele1 = a1; if (a1 != a2){ allele2 = a2; } }else if (allele2 == 0){ if (a1 != allele1){ allele2 = a1; }else if (a2 != allele1){ allele2 = a2; } } if (a1 != 0){ if (a1 == allele1){ counts[cc][0] ++; }else{ counts[cc][1] ++; } } if (a2 != 0){ if (a2 == allele1){ counts[cc][0]++; }else{ counts[cc][1]++; } } } } |
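
The tallyCCInd row above accumulates allele counts into counts[group][allele] after remapping case status 2 to row 0. A simplified sketch of that tally, omitting the a >= 5 half-missing encoding and the two-allele seeding edge cases the original handles:

public class AlleleCounts {
    // counts[group][allele]: group 0 = case (cc passed in as 2), group 1 = control.
    static int[][] tally(byte[][] genotypes, int[] status) {
        int[][] counts = new int[2][2];
        byte allele1 = 0;
        for (int i = 0; i < genotypes.length; i++) {
            int cc = status[i] == 2 ? 0 : 1;   // same remap as the row above
            for (byte a : genotypes[i]) {
                if (a == 0) continue;          // missing
                if (allele1 == 0) allele1 = a; // seed the first allele seen
                counts[cc][a == allele1 ? 0 : 1]++;
            }
        }
        return counts;
    }

    public static void main(String[] args) {
        byte[][] g = { {1, 1}, {1, 2}, {2, 2} };
        int[] status = { 2, 2, 1 };            // two cases, one control
        int[][] c = tally(g, status);
        System.out.println("case:    " + c[0][0] + "/" + c[0][1]); // 3/1
        System.out.println("control: " + c[1][0] + "/" + c[1][1]); // 0/2
    }
}
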
|
caller.setChosenMarker(null); | public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command.equals(BROWSE_GENO)){ browse(GENO_FILE); }else if (command.equals(BROWSE_HAPS)){ browse(HAPS_FILE); }else if (command.equals(BROWSE_HMP)){ browse(HMP_FILE); }else if (command.equals(BROWSE_PHASE)){ browse(PHASED_FILE); }else if (command.equals(BROWSE_SAMPLE)){ browse(SAMPLE_FILE); }else if (command.equals(BROWSE_LEGEND)){ browse(LEGEND_FILE); }else if (command.equals(BROWSE_INFO)){ browse(INFO_FILE); }else if (command.equals(BROWSE_ASSOC)){ browse(ASSOC_FILE); }else if (command.equals(BROWSE_WGA)){ browse(PLINK_FILE); }else if (command.equals(BROWSE_MAP)){ browse(MAP_FILE); } else if (command.equals("OK")){ //workaround for dumb Swing can't requestFocus until shown bug //this one seems to throw a harmless exception in certain versions of the linux JRE try{ SwingUtilities.invokeLater( new Runnable(){ public void run() { pedFileField.requestFocus(); }}); }catch (RuntimeException re){ } int currTab = dataFormatPane.getSelectedIndex(); if (currTab == 0){ fileType = PED_FILE; }else if (currTab == 1){ fileType = HAPS_FILE; }else if (currTab == 2){ fileType = HMP_FILE; }else if (currTab == 3){ fileType = PHASED_FILE; }else if (currTab == 4){ fileType = PHASEDHMPDL_FILE; }else if (currTab == 5){ fileType = PLINK_FILE; } HaploView caller = (HaploView)this.getParent(); if(missingCutoffField.getText().equals("")) { Options.setMissingThreshold(1); } else { double missingThreshold = (double)(Integer.parseInt(missingCutoffField.getText())) / 100; if(missingThreshold > 1) { JOptionPane.showMessageDialog(caller, "Missing cutoff must be between 0 and 100", "Invalid value", JOptionPane.ERROR_MESSAGE); return; } Options.setMissingThreshold(missingThreshold); } if (doAssociation.isSelected() && fileType == PED_FILE){ if (trioButton.isSelected()){ Options.setAssocTest(ASSOC_TRIO); if(standardTDT.isSelected()){ Options.setTdtType(TDT_STD); }else if(parenTDT.isSelected()) { Options.setTdtType(TDT_PAREN); } } else { Options.setAssocTest(ASSOC_CC); } }else{ Options.setAssocTest(ASSOC_NONE); } if (xChrom.isSelected() && fileType == PED_FILE){ Chromosome.setDataChrom("chrx"); }else if (hapsXChrom.isSelected() && fileType == HAPS_FILE){ Chromosome.setDataChrom("chrx"); } else { Chromosome.setDataChrom("none"); } if (doGB.isSelected() && fileType == HMP_FILE){ Options.setShowGBrowse(true); }else{ Options.setShowGBrowse(false); } Options.setgBrowseLeft(0); Options.setgBrowseRight(0); if (maxComparisonDistField.getText().equals("")){ Options.setMaxDistance(0); }else{ Options.setMaxDistance(Integer.parseInt(maxComparisonDistField.getText())); } if (fileType == PHASED_FILE){ if (gZip.isSelected()){ Options.setGzip(true); }else{ Options.setGzip(false); } isDownloaded = false; if (phaseDoGB.isSelected()){ Options.setShowGBrowse(true); if (loadChromChooser.getSelectedIndex() == -1){ JOptionPane.showMessageDialog(caller, "HapMap Info Track download requires a chromosome.", "Invalid value", JOptionPane.ERROR_MESSAGE); return; } }else{ Options.setShowGBrowse(false); } if (loadChromChooser.getSelectedIndex() == -1){ chromChoice = ""; }else{ chromChoice = (String)loadChromChooser.getSelectedItem(); } } if (fileType == PHASEDHMPDL_FILE){ isDownloaded = true; if (downloadDoGB.isSelected()){ Options.setShowGBrowse(true); }else{ Options.setShowGBrowse(false); } if (chromChooser.getSelectedIndex() == -1){ JOptionPane.showMessageDialog(caller, "Please select a chromosome.", "Invalid value", 
JOptionPane.ERROR_MESSAGE); return; } if (chromStartField.getText().equals("")){ JOptionPane.showMessageDialog(caller, "Please enter a starting value.", "Invalid value", JOptionPane.ERROR_MESSAGE); return; } if (chromEndField.getText().equals("")){ JOptionPane.showMessageDialog(caller, "Please enter an ending value.", "Invalid value", JOptionPane.ERROR_MESSAGE); return; } if (Integer.parseInt(chromStartField.getText()) >= Integer.parseInt(chromEndField.getText())){ JOptionPane.showMessageDialog(caller, "End position must be larger then start position.", "Invalid value", JOptionPane.ERROR_MESSAGE); return; } chromChoice = (String)chromChooser.getSelectedItem(); popChoice = (String)popChooser.getSelectedItem(); phaseChoice = (String)phaseChooser.getSelectedItem(); } if (fileType == PLINK_FILE){ if (embeddedMap.isSelected()){ embed = "Y"; } } String[] returnStrings; if (fileType == HAPS_FILE){ returnStrings = new String[]{hapsFileField.getText(), hapsInfoField.getText(),null}; if (returnStrings[1].equals("")) returnStrings[1] = null; }else if (fileType == HMP_FILE){ returnStrings = new String[]{hmpFileField.getText(),null,null}; }else if (fileType == PHASED_FILE ){ returnStrings = new String[]{phaseFileField.getText(), phaseSampleField.getText(), phaseLegendField.getText(),"",chromChoice}; }else if (fileType == PHASEDHMPDL_FILE){ returnStrings = new String[]{"Chr" + chromChoice + ":" + popChoice + ":" + chromStartField.getText() + ".." + chromEndField.getText(), popChoice, chromStartField.getText(), chromEndField.getText(), chromChoice, phaseChoice}; }else if (fileType == PLINK_FILE){ returnStrings = new String[]{plinkFileField.getText(), plinkMapField.getText(),null,embed}; } else{ returnStrings = new String[]{pedFileField.getText(), pedInfoField.getText(), testFileField.getText()}; if (returnStrings[1].equals("")) returnStrings[1] = null; if (returnStrings[2].equals("") || !doAssociation.isSelected()) returnStrings[2] = null; } //if a dataset was previously loaded during this session, discard the display panes for it. caller.clearDisplays(); caller.setPlinkData(null,null); this.dispose(); if (fileType != PLINK_FILE){ caller.readGenotypes(returnStrings, fileType, isDownloaded); }else{ caller.readWGA(returnStrings); } }else if (command.equals("Cancel")){ this.dispose(); }else if (command.equals("association")){ switchAssoc(doAssociation.isSelected()); }else if(command.equals("tdt")){ standardTDT.setEnabled(true); if (!xChrom.isSelected()){ parenTDT.setEnabled(true); } }else if(command.equals("ccButton")){ standardTDT.setEnabled(false); parenTDT.setEnabled(false); }else if (command.equals("xChrom")){ if (xChrom.isSelected()){ parenTDT.setEnabled(false); standardTDT.setSelected(true); }else if (standardTDT.isEnabled()){ parenTDT.setEnabled(true); } }else if (command.equals("Integrated Map Info")){ if (embeddedMap.isSelected()){ embeddedMap.setSelected(true); mapLabel.setEnabled(false); plinkMapField.setEnabled(false); browsePlinkMapButton.setEnabled(false); }else{ embeddedMap.setSelected(false); mapLabel.setEnabled(true); plinkMapField.setEnabled(true); browsePlinkMapButton.setEnabled(true); } }else if (command.equals("Proxy Settings")){ ProxyDialog pd = new ProxyDialog(this,"Proxy Settings"); pd.pack(); pd.setVisible(true); } } |
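
The dialog row above converts a percent text field into a 0..1 missing-data threshold, treating an empty field as "no cutoff" and rejecting values over 100. A tiny standalone version of that validation (the method name is hypothetical):

public class PercentField {
    // Convert a percent string to a fraction; empty means "no cutoff" (1.0).
    // Values over 100 are rejected, as in the dialog above.
    static double parseMissingCutoff(String text) {
        if (text.equals("")) return 1.0;
        double threshold = Integer.parseInt(text) / 100.0;
        if (threshold > 1.0) {
            throw new IllegalArgumentException("Missing cutoff must be between 0 and 100");
        }
        return threshold;
    }

    public static void main(String[] args) {
        System.out.println(parseMissingCutoff(""));   // 1.0
        System.out.println(parseMissingCutoff("25")); // 0.25
    }
}
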
|
this.doTag(output,true); | super.doTag(output); clearBean(); | public void doTag(XMLOutput output) throws JellyTagException { this.doTag(output,true); } |
if(infoFileName == null) { | if(infoFileName == null && hapmapFileName == null) { | private void argHandler(String[] args){ int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(pedFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ System.err.println("PC LOADLETTER?! What the fuck does that mean?!"); System.exit(31337); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("-excludeMarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { if (!quietMode) System.out.print("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) System.out.print(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) System.out.println(token+" "); excludedMarkers.add(new Integer(token)); } } if (!quietMode) System.out.println(); } catch(NumberFormatException nfe) { System.out.println("-excludeMarkers argument should be of the format: 1,3,5..8,12"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapsFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(infoFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapmapFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. 
only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ System.out.println("-track requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ System.out.println("only one output argument is allowed"); System.exit(1); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one "+args[i-1] + " argument allowed"); System.exit(1); } try { maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(batchFileName != null){ System.out.println("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,"-hapthresh",0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,"-spacing",0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,"-minMAF",0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,"-minGeno",0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,"-hwcutoff",0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-maxMendel requires an integer argument"); System.exit(1); } else { try { maxMendel = Integer.parseInt(args[i]); if(maxMendel<0){ System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,"-missingCutoff",0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-permtests")) { i++; int permCount=0; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-permtests requires an integer argument"); System.exit(1); } else { try { permCount = Integer.parseInt(args[i]); if(permCount<0){ System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } doPermutationTest = true; permutationCount = permCount; } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-doTagging")) { doTagging = true; } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,"-tagrSqCutoff",0,1); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println(args[i-1] + " requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); 
System.exit(1); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println("-excludeTags requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); System.exit(1); } } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(batchFileName != null) { countOptions++; } if(countOptions > 1) { System.out.println("Only one genotype input file may be specified on the command line."); System.exit(1); } else if(countOptions == 0 && nogui) { System.out.println("You must specify a genotype input file."); System.exit(1); } //mess with vars, set defaults, etc if(skipCheck && !quietMode) { System.out.println("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = 500; }else{ if (!quietMode) System.out.println("Max LD comparison distance = " +maxDistance); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold((int)(hapThresh*100)); if (!quietMode) System.out.println("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; if (!quietMode) System.out.println("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); if (!quietMode) System.out.println("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; if (!quietMode) System.out.println("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; if (!quietMode) System.out.println("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); if (!quietMode) System.out.println("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); if (!quietMode) System.out.println("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); } else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if(doPermutationTest) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified for permutation tests to be performed."); System.exit(1); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified when using a custom association test file."); System.exit(1); } if(infoFileName == null) { System.out.println("A marker info file must be specified when using a custom association test file."); System.exit(1); } } if(doTagging) { if(infoFileName == null) { System.out.println("A marker info file must be specified when using -doTagging"); System.exit(1); } if(blockOutputType == -1) { System.out.println("a block output type must be specified when using tagger"); System.exit(1); } if(forceExcludeTags == null) { 
forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { System.out.println("-excludeTags and -excludeTagsFile cannot both be used"); System.exit(1); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -excludeTagsFile."); System.exit(1); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != null) { System.out.println("-includeTags and -includeTagsFile cannot both be used"); System.exit(1); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -includeTagsFile."); System.exit(1); } } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } System.out.println("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { System.out.println("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without -doTagging"); System.exit(1); } } |
System.exit(1); } if(blockOutputType == -1) { System.out.println("a block output type must be specified when using tagger"); | private void argHandler(String[] args){ int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(pedFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ System.err.println("PC LOADLETTER?! What the fuck does that mean?!"); System.exit(31337); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("-excludeMarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { if (!quietMode) System.out.print("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) System.out.print(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) System.out.println(token+" "); excludedMarkers.add(new Integer(token)); } } if (!quietMode) System.out.println(); } catch(NumberFormatException nfe) { System.out.println("-excludeMarkers argument should be of the format: 1,3,5..8,12"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapsFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(infoFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapmapFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. 
only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ System.out.println("-track requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ System.out.println("only one output argument is allowed"); System.exit(1); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one "+args[i-1] + " argument allowed"); System.exit(1); } try { maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(batchFileName != null){ System.out.println("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,"-hapthresh",0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,"-spacing",0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,"-minMAF",0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,"-minGeno",0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,"-hwcutoff",0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-maxMendel requires an integer argument"); System.exit(1); } else { try { maxMendel = Integer.parseInt(args[i]); if(maxMendel<0){ System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-maxMendel argument must be a positive integer"); System.exit(1); } } } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,"-missingCutoff",0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-permtests")) { i++; int permCount=0; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println("-permtests requires an integer argument"); System.exit(1); } else { try { permCount = Integer.parseInt(args[i]); if(permCount<0){ System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } catch(NumberFormatException nfe) { System.out.println("-permtests argument must be a positive integer"); System.exit(1); } } doPermutationTest = true; permutationCount = permCount; } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-doTagging")) { doTagging = true; } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,"-tagrSqCutoff",0,1); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println(args[i-1] + " requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); 
System.exit(1); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { System.out.println("-excludeTags requires a list of marker names."); System.exit(1); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); System.exit(1); } } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(batchFileName != null) { countOptions++; } if(countOptions > 1) { System.out.println("Only one genotype input file may be specified on the command line."); System.exit(1); } else if(countOptions == 0 && nogui) { System.out.println("You must specify a genotype input file."); System.exit(1); } //mess with vars, set defaults, etc if(skipCheck && !quietMode) { System.out.println("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = 500; }else{ if (!quietMode) System.out.println("Max LD comparison distance = " +maxDistance); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold((int)(hapThresh*100)); if (!quietMode) System.out.println("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; if (!quietMode) System.out.println("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); if (!quietMode) System.out.println("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; if (!quietMode) System.out.println("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; if (!quietMode) System.out.println("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); if (!quietMode) System.out.println("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); if (!quietMode) System.out.println("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); } else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if(doPermutationTest) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified for permutation tests to be performed."); System.exit(1); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { System.out.println("An association test type must be specified when using a custom association test file."); System.exit(1); } if(infoFileName == null) { System.out.println("A marker info file must be specified when using a custom association test file."); System.exit(1); } } if(doTagging) { if(infoFileName == null) { System.out.println("A marker info file must be specified when using -doTagging"); System.exit(1); } if(blockOutputType == -1) { System.out.println("a block output type must be specified when using tagger"); System.exit(1); } if(forceExcludeTags == null) { 
forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { System.out.println("-excludeTags and -excludeTagsFile cannot both be used"); System.exit(1); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -excludeTagsFile."); System.exit(1); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != null) { System.out.println("-includeTags and -includeTagsFile cannot both be used"); System.exit(1); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { System.out.println("An error occured while reading the file specified by -includeTagsFile."); System.exit(1); } } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } System.out.println("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { System.out.println("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without -doTagging"); System.exit(1); } } |
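
The tag include/exclude handling above reads one marker name per line, skipping blanks and '#' comments, then flags overlap between the two lists via clone()/retainAll(). A self-contained sketch of both helpers in the same Vector-based style as the original:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Vector;

public class TagListFile {
    // Read one marker name per line, ignoring blanks and '#' comments,
    // as the -includeTagsFile/-excludeTagsFile handling above does.
    static Vector readTagFile(String fileName) throws IOException {
        Vector tags = new Vector();
        BufferedReader br = new BufferedReader(new FileReader(fileName));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (line.length() > 0 && line.charAt(0) != '#') {
                    tags.add(line);
                }
            }
        } finally {
            br.close();
        }
        return tags;
    }

    // Flag markers present in both lists, mirroring the clone()/retainAll() check.
    static Vector overlap(Vector include, Vector exclude) {
        Vector tmp = (Vector) include.clone();
        tmp.retainAll(exclude);
        return tmp;
    }

    public static void main(String[] args) {
        Vector inc = new Vector(); inc.add("rs1"); inc.add("rs2");
        Vector exc = new Vector(); exc.add("rs2");
        System.out.println("in both lists: " + overlap(inc, exc)); // [rs2]
    }
}
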
|
if(textData.dpTable == null) { textData.generateDPrimeTable(); } | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds has been ignored: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); 
break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = null; if( customAssocSet != null) { permTests = customAssocSet; }else { permTests = new AssociationTestSet(); permTests.cat(markerTestSet); permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getSavedEMs(),textData.getPedFile(),permTests); Thread permThread = new Thread(new Runnable() { public void run() { pts.doPermutations(); } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.currentThread().sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(doTagging) { Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); Hashtable idsByName = new Hashtable(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); idsByName.put(snp.getName(),new Integer(i)); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced included tags does not appear in the marker info file."); System.exit(1); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s)) { System.out.println("Marker " + s + " in the list of forced excluded tags does not appear in the marker info file."); System.exit(1); } } if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, Tagger.AGGRESSIVE_TRIPLE); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
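
The parser in the record above repeats one validation shape for every option that takes a value: advance to the next token, then reject a missing value or one that looks like another flag. A minimal stand-alone sketch of that shape (the helper name requireValue is hypothetical, not in the source):

class ArgCheck {
    // Hypothetical helper (not in the source) distilling the recurring check:
    // an option's value must exist and must not begin like another flag.
    static String requireValue(String[] args, int i, String option) {
        if (i + 1 >= args.length || args[i + 1].charAt(0) == '-') {
            System.out.println(option + " requires an argument.");
            System.exit(1);
        }
        return args[i + 1];
    }
}

Each option branch in the parser performs exactly this check before consuming its value.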
|
path = new GeneralPath(GeneralPath.WIND_EVEN_ODD, 5); | path = new GeneralPath(GeneralPath.WIND_NON_ZERO, 5); | public void paint(Graphics g, JComponent c) { logger.debug("BasicRelationshipUI is painting"); Relationship r = (Relationship) c; Graphics2D g2 = (Graphics2D) g; g2.translate(c.getX() * -1, c.getY() * -1); // playpen coordinate space if (logger.isDebugEnabled()) { g2.setColor(c.getBackground()); Rectangle bounds = c.getBounds(); g2.fillRect(bounds.x, bounds.y, bounds.width, bounds.height); g2.setColor(c.getForeground()); } try { Point pktloc = pkConnectionPoint; Point start = new Point(pktloc.x + r.getPkTable().getLocation().x, pktloc.y + r.getPkTable().getLocation().y); Point fktloc = fkConnectionPoint; Point end = new Point(fktloc.x + r.getFkTable().getLocation().x, fktloc.y + r.getFkTable().getLocation().y); // XXX: could optimise by checking if PK or FK tables have moved if (path == null) { path = new GeneralPath(GeneralPath.WIND_EVEN_ODD, 5); } else { path.reset(); } if (relationship.getPkTable() == relationship.getFkTable()) { // special case hack for self-referencing table // assume orientation is PARENT_FACES_BOTTOM | CHILD_FACES_LEFT path.moveTo(start.x, start.y); path.lineTo(start.x, start.y + getTerminationLength() * 2); path.lineTo(end.x - getTerminationLength() * 2, start.y + getTerminationLength() * 2); path.lineTo(end.x - getTerminationLength() * 2, end.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_LEFT | PARENT_FACES_RIGHT)) != 0 && (orientation & (CHILD_FACES_LEFT | CHILD_FACES_RIGHT)) != 0) { int midx = (Math.abs(end.x - start.x) / 2) + Math.min(start.x, end.x); path.moveTo(start.x, start.y); path.lineTo(midx, start.y); path.lineTo(midx, end.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_TOP | PARENT_FACES_BOTTOM)) != 0 && (orientation & (CHILD_FACES_TOP | CHILD_FACES_BOTTOM)) != 0) { int midy = (Math.abs(end.y - start.y) / 2) + Math.min(start.y, end.y); path.moveTo(start.x, start.y); path.lineTo(start.x, midy); path.lineTo(end.x, midy); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_LEFT | PARENT_FACES_RIGHT)) != 0) { path.moveTo(start.x, start.y); path.lineTo(end.x, start.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_TOP | PARENT_FACES_BOTTOM)) != 0) { path.moveTo(start.x, start.y); path.lineTo(start.x, end.y); path.lineTo(end.x, end.y); } else { // unknown case: draw straight line. path.moveTo(start.x, start.y); path.lineTo(end.x, end.y); } if (r.isSelected()) { g2.setColor(selectedColor); } else { g2.setColor(unselectedColor); } Stroke oldStroke = g2.getStroke(); if (relationship.getModel().isIdentifying()) { g2.setStroke(getIdentifyingStroke()); } else { g2.setStroke(getNonIdentifyingStroke()); } g2.draw(path); logger.debug("Drew path "+path); g2.setStroke(oldStroke); paintTerminations(g2, start, end, orientation); } finally { g2.translate(c.getX(), c.getY()); // playpen coordinate space } } |
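
The winding-rule switch in the record above only changes how a self-intersecting path is filled or hit-tested; stroking the outline with Graphics2D.draw(), as the paint method does, renders the same either way. A small illustrative sketch (not from the source):

import java.awt.geom.GeneralPath;

// Illustrative only: the winding rule decides which regions of a
// self-intersecting outline count as "inside" for fill and containment.
class WindingRuleDemo {
    static boolean containsPoint(int rule) {
        GeneralPath p = new GeneralPath(rule, 5);
        p.moveTo(0f, 0f);
        p.lineTo(100f, 100f);
        p.lineTo(100f, 0f);
        p.lineTo(0f, 100f);
        p.closePath();
        return p.contains(50.0, 40.0); // containment uses the path's winding rule
    }
}

Compare containsPoint(GeneralPath.WIND_EVEN_ODD) with containsPoint(GeneralPath.WIND_NON_ZERO) to see where the two rules can disagree.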
logger.debug("Drew path "+path); | if (logger.isDebugEnabled()) logger.debug("Drew path "+path); | public void paint(Graphics g, JComponent c) { logger.debug("BasicRelationshipUI is painting"); Relationship r = (Relationship) c; Graphics2D g2 = (Graphics2D) g; g2.translate(c.getX() * -1, c.getY() * -1); // playpen coordinate space if (logger.isDebugEnabled()) { g2.setColor(c.getBackground()); Rectangle bounds = c.getBounds(); g2.fillRect(bounds.x, bounds.y, bounds.width, bounds.height); g2.setColor(c.getForeground()); } try { Point pktloc = pkConnectionPoint; Point start = new Point(pktloc.x + r.getPkTable().getLocation().x, pktloc.y + r.getPkTable().getLocation().y); Point fktloc = fkConnectionPoint; Point end = new Point(fktloc.x + r.getFkTable().getLocation().x, fktloc.y + r.getFkTable().getLocation().y); // XXX: could optimise by checking if PK or FK tables have moved if (path == null) { path = new GeneralPath(GeneralPath.WIND_EVEN_ODD, 5); } else { path.reset(); } if (relationship.getPkTable() == relationship.getFkTable()) { // special case hack for self-referencing table // assume orientation is PARENT_FACES_BOTTOM | CHILD_FACES_LEFT path.moveTo(start.x, start.y); path.lineTo(start.x, start.y + getTerminationLength() * 2); path.lineTo(end.x - getTerminationLength() * 2, start.y + getTerminationLength() * 2); path.lineTo(end.x - getTerminationLength() * 2, end.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_LEFT | PARENT_FACES_RIGHT)) != 0 && (orientation & (CHILD_FACES_LEFT | CHILD_FACES_RIGHT)) != 0) { int midx = (Math.abs(end.x - start.x) / 2) + Math.min(start.x, end.x); path.moveTo(start.x, start.y); path.lineTo(midx, start.y); path.lineTo(midx, end.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_TOP | PARENT_FACES_BOTTOM)) != 0 && (orientation & (CHILD_FACES_TOP | CHILD_FACES_BOTTOM)) != 0) { int midy = (Math.abs(end.y - start.y) / 2) + Math.min(start.y, end.y); path.moveTo(start.x, start.y); path.lineTo(start.x, midy); path.lineTo(end.x, midy); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_LEFT | PARENT_FACES_RIGHT)) != 0) { path.moveTo(start.x, start.y); path.lineTo(end.x, start.y); path.lineTo(end.x, end.y); } else if ( (orientation & (PARENT_FACES_TOP | PARENT_FACES_BOTTOM)) != 0) { path.moveTo(start.x, start.y); path.lineTo(start.x, end.y); path.lineTo(end.x, end.y); } else { // unknown case: draw straight line. path.moveTo(start.x, start.y); path.lineTo(end.x, end.y); } if (r.isSelected()) { g2.setColor(selectedColor); } else { g2.setColor(unselectedColor); } Stroke oldStroke = g2.getStroke(); if (relationship.getModel().isIdentifying()) { g2.setStroke(getIdentifyingStroke()); } else { g2.setStroke(getNonIdentifyingStroke()); } g2.draw(path); logger.debug("Drew path "+path); g2.setStroke(oldStroke); paintTerminations(g2, start, end, orientation); } finally { g2.translate(c.getX(), c.getY()); // playpen coordinate space } } |
fireDbObjectChanged("referenceCount", oldReference, referenceCount); | public void addReference() { referenceCount++; logger.debug("incremented reference count to: " + referenceCount); } |
|
throw new IllegalStateException("Reference count is already 0; can't remove any references!"); | logger.debug("Reference count of "+ this.getParentTable() +"."+this+" was already 0"); throw new IllegalStateException("Reference count of is already 0; can't remove any references!"); | public void removeReference() { if (logger.isDebugEnabled()) { String parentName = "<no parent table>"; if (getParent() != null && getParentTable() != null) { parentName = getParentTable().getName(); } logger.debug("Trying to remove reference from "+parentName+"."+getName()+" "+hashCode()); } if (referenceCount == 0) { throw new IllegalStateException("Reference count is already 0; can't remove any references!"); } referenceCount--; logger.debug("decremented reference count to: " + referenceCount); if (referenceCount == 0) { // delete from the parent (columnsFolder) if (getParent() != null){ logger.debug("reference count is 0, deleting column from parent."); getParent().removeChild(this); } else { logger.debug("Already removed from parent"); } } } |
fireDbObjectChanged("referenceCount", oldReference, referenceCount); | public void removeReference() { if (logger.isDebugEnabled()) { String parentName = "<no parent table>"; if (getParent() != null && getParentTable() != null) { parentName = getParentTable().getName(); } logger.debug("Trying to remove reference from "+parentName+"."+getName()+" "+hashCode()); } if (referenceCount == 0) { throw new IllegalStateException("Reference count is already 0; can't remove any references!"); } referenceCount--; logger.debug("decremented reference count to: " + referenceCount); if (referenceCount == 0) { // delete from the parent (columnsFolder) if (getParent() != null){ logger.debug("reference count is 0, deleting column from parent."); getParent().removeChild(this); } else { logger.debug("Already removed from parent"); } } } |
|
System.out.println( "photovault.configfile " + confFileName ); | log.debug( "photovault.configfile " + confFileName ); | protected PhotovaultSettings() { // Load XML configuration file String confFileName = System.getProperty( "photovault.configfile" ); if ( confFileName != null ) { System.out.println( "photovault.configfile " + confFileName ); configFile = new File( confFileName ); System.out.println( configFile ); } else { // If the photovault.configfile property is not set, use file photovault.xml // in directory .photovault in user's home directory File homeDir = new File( System.getProperty( "user.home", "" ) ); File photovaultDir = new File( homeDir, ".photovault" ); if ( !photovaultDir.exists() ) { photovaultDir.mkdir(); } configFile = new File( photovaultDir, "photovault.xml" ); } if ( configFile.exists() ) { log.debug( "Using config file " + configFile.getAbsolutePath() ); databases = PhotovaultDatabases.loadDatabases( configFile ); } else { try { configFile.createNewFile(); } catch (IOException ex) { ex.printStackTrace(); } } if ( databases == null ) { databases = new PhotovaultDatabases(); } } |
System.out.println( configFile ); | log.debug( configFile ); | protected PhotovaultSettings() { // Load XML configuration file String confFileName = System.getProperty( "photovault.configfile" ); if ( confFileName != null ) { System.out.println( "photovault.configfile " + confFileName ); configFile = new File( confFileName ); System.out.println( configFile ); } else { // If the photovault.configfile property is not set, use file photovault.xml // in directory .photovault in user's home directory File homeDir = new File( System.getProperty( "user.home", "" ) ); File photovaultDir = new File( homeDir, ".photovault" ); if ( !photovaultDir.exists() ) { photovaultDir.mkdir(); } configFile = new File( photovaultDir, "photovault.xml" ); } if ( configFile.exists() ) { log.debug( "Using config file " + configFile.getAbsolutePath() ); databases = PhotovaultDatabases.loadDatabases( configFile ); } else { try { configFile.createNewFile(); } catch (IOException ex) { ex.printStackTrace(); } } if ( databases == null ) { databases = new PhotovaultDatabases(); } } |
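
The two records above migrate constructor diagnostics from System.out to the class logger. The existing log.debug(...) calls suggest a log4j-style logger; a minimal declaration consistent with that assumption (framework and form inferred, not shown in the source) would be:

import org.apache.log4j.Logger;

public class PhotovaultSettings {
    // assumes Apache log4j, inferred from the log.debug(...) calls in context
    private static final Logger log = Logger.getLogger( PhotovaultSettings.class );
}
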
dblist.add(SQLDatabase.getPlayPenInstance()); | public TestUI(DBConnectionSpec spec) throws ArchitectException { super("UI Test Frame"); dbcs = spec; SQLDatabase db = new SQLDatabase(spec); playpen = new PlayPen(SQLDatabase.getPlayPenInstance()); ArrayList dblist = new ArrayList(1); dblist.add(db); dbTree = new DBTree(dblist); JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, new JScrollPane(dbTree), new JScrollPane(playpen)); setContentPane(splitPane); pack(); splitPane.setDividerLocation(dbTree.getPreferredSize().width); JFrame controlsFrame = createControlsFrame(); controlsFrame.pack(); controlsFrame.setVisible(true); controlsFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); } |
|
return (getName().hashCode() * 31 + getType().hashCode()) * 31 + getEnclosingType().hashCode(); | return (getName().hashCode() * 31 + getType().getName().hashCode()) * 31 + getEnclosingType().getName().hashCode(); | public int hashCode() { return (getName().hashCode() * 31 + getType().hashCode()) * 31 + getEnclosingType().hashCode(); } |
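
A plausible rationale for this change (not stated in the record): if the objects returned by getType() or getEnclosingType() can reference this object back, delegating to their hashCode() risks mutual recursion, whereas hashing their stable String names always terminates. The fixed form, with that reading spelled out:

public int hashCode() {
    // hash stable identifiers rather than objects that may point back here
    return (getName().hashCode() * 31
            + getType().getName().hashCode()) * 31
            + getEnclosingType().getName().hashCode();
}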
Object colorValue = attributes.remove("background"); Color background = getColor(control, colorValue); control.setBackground(background); colorValue = attributes.remove("foreground"); Color foreground = getColor(control, colorValue); control.setForeground(foreground); | Object colorValue = attributes.remove("background"); Color background = getColor(control, colorValue); control.setBackground(background); | protected void setBeanProperties(Object bean, Map attributes) throws JellyTagException { if (bean instanceof Control) { Control control = (Control) bean; // Special handling of size property as the Control object breaks the // JavaBean naming conventions by overloading the setSize() method Object size = attributes.remove("size"); setSize(control, size); // Special handling of color property as the Control object breaks the // JavaBean naming conventions by overloading the setBackground() or setForeground() method Object colorValue = attributes.remove("background"); Color background = getColor(control, colorValue); control.setBackground(background); colorValue = attributes.remove("foreground"); Color foreground = getColor(control, colorValue); control.setForeground(foreground); } super.setBeanProperties(bean, attributes); } |
} | colorValue = attributes.remove("foreground"); Color foreground = getColor(control, colorValue); control.setForeground(foreground); } | protected void setBeanProperties(Object bean, Map attributes) throws JellyTagException { if (bean instanceof Control) { Control control = (Control) bean; // Special handling of size property as the Control object breaks the // JavaBean naming conventions by overloading the setSize() method Object size = attributes.remove("size"); setSize(control, size); // Special handling of color property as the Control object breaks the // JavaBean naming conventions by overloading the setBackground() or setForeground() method Object colorValue = attributes.remove("background"); Color background = getColor(control, colorValue); control.setBackground(background); colorValue = attributes.remove("foreground"); Color foreground = getColor(control, colorValue); control.setForeground(foreground); } super.setBeanProperties(bean, attributes); } |
File OutputFile; | File outputFile; | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case 
BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); | outputFile = validateOutputFile(fileName + ".GABRIELblocks"); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; 
case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
OutputFile = validateOutputFile(fileName + ".4GAMblocks"); | outputFile = validateOutputFile(fileName + ".4GAMblocks"); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case 
BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
OutputFile = validateOutputFile(fileName + ".SPINEblocks"); | outputFile = validateOutputFile(fileName + ".SPINEblocks"); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case 
BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
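The rem/add pair in the row above (and in the two rows that follow) is a pure rename: the local File variable OutputFile becomes outputFile, matching Java's lowerCamelCase convention for local variables. Only the declaration and its uses change; behavior is identical. A minimal compilable sketch of the renamed form follows; RenameSketch and demo.ped are hypothetical, and a plain File constructor stands in for the project's validateOutputFile helper, whose body is not shown in these rows.

import java.io.File;

public class RenameSketch {
    public static void main(String[] args) {
        String fileName = "demo.ped";                          // hypothetical input name
        File outputFile = new File(fileName + ".SPINEblocks"); // was: File OutputFile = ...
        System.out.println("would write blocks to " + outputFile.getPath());
    }
}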
OutputFile = validateOutputFile(fileName + ".CUSTblocks"); | outputFile = validateOutputFile(fileName + ".CUSTblocks"); | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case 
BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
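For orientation, the switch over blockOutputType in the context builds each output name as fileName plus a per-type suffix (.GABRIELblocks, .4GAMblocks, .SPINEblocks, .CUSTblocks). The sketch below expresses the same mapping as data; the enum is an assumption standing in for the source's int constants (BLOX_GABRIEL, BLOX_4GAM, BLOX_SPINE, BLOX_CUSTOM), whose actual values do not appear in these rows.

import java.util.EnumMap;
import java.util.Map;

public class SuffixSketch {
    // Hypothetical enum replacing the source's int constants for this sketch only.
    enum BlockOutput { GABRIEL, FOUR_GAMETE, SPINE, CUSTOM }

    static final Map<BlockOutput, String> SUFFIX = new EnumMap<>(Map.of(
            BlockOutput.GABRIEL, ".GABRIELblocks",
            BlockOutput.FOUR_GAMETE, ".4GAMblocks",
            BlockOutput.SPINE, ".SPINEblocks",
            BlockOutput.CUSTOM, ".CUSTblocks"));

    public static void main(String[] args) {
        String fileName = "demo.ped";               // hypothetical input name
        System.out.println(fileName + SUFFIX.get(BlockOutput.SPINE));
    }
}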
OutputFile = null; | outputFile = null; | private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File OutputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } if (fileType != HAPS_FILE){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; if (fileType != HAPS_FILE){ result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } }else{ //we haven't done the check (HAPS files) Arrays.fill(markerResults, true); } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case 
BLOX_SPINE: OutputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here OutputFile = null; break; default: OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { OutputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } OutputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile);; }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } OutputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), OutputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { OutputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ OutputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, OutputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } } |
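One last detail from the tagging section of the context: forceExcludeTags.retainAll(filteredNames) and forceIncludeTags.retainAll(names) intersect the user-supplied tag lists with the markers actually loaded, silently dropping unknown or filtered-out names before tagging starts (after warnings have already been printed). A small runnable illustration of retainAll doing that intersection; the rs* marker names are made up.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Vector;

public class RetainSketch {
    public static void main(String[] args) {
        Vector<String> forceIncludeTags = new Vector<>(Arrays.asList("rs1", "rs2", "bogus"));
        HashSet<String> names = new HashSet<>(Arrays.asList("rs1", "rs2", "rs3"));
        forceIncludeTags.retainAll(names);    // "bogus" is not a known marker, so it is removed
        System.out.println(forceIncludeTags); // prints [rs1, rs2]
    }
}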