Dataset columns: rem (removed code, string, 0-477k chars), add (added code, string, 0-313k chars), context (surrounding code, string, 6-599k chars).
return new DrawingMethods().dPrimeGetPreferredSize(table.length);
return new DrawingMethods().dPrimeGetPreferredSize(table.length,info);
public Dimension getPreferredSize(){ return new DrawingMethods().dPrimeGetPreferredSize(table.length); }
new DrawingMethods().dPrimeDraw(table, g);
new DrawingMethods().dPrimeDraw(table, info, vec, g);
public void paintComponent(Graphics g){ super.paintComponent(g); new DrawingMethods().dPrimeDraw(table, g); }
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{
public CheckDataPanel(HaploData hd) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getMarker(i).getName()); tempVect.add(new Long(Chromosome.getMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings);
tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings);
table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){
if (disp){ table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller);
table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
}
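Taken together, the hunks above thread a new boolean through CheckDataPanel so the Swing JTable is only built when a display is wanted; the table model is constructed either way. A minimal, hypothetical call-site sketch (variable names are illustrative):

// Hypothetical call sites, assuming the two-argument constructor introduced above:
CheckDataPanel headless = new CheckDataPanel(hd, false); // batch/command-line run: model only, no JTable
CheckDataPanel onScreen = new CheckDataPanel(hd, true);  // GUI run: JTable and scroll pane built as well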
int numCols = table.getColumnCount();
int numCols = tableModel.getColumnCount();
public void printTable(File outfile) throws IOException{ FileWriter checkWriter = null; if (outfile != null){ checkWriter = new FileWriter(outfile); } int numCols = table.getColumnCount(); StringBuffer header = new StringBuffer(); for (int i = 0; i < numCols; i++){ header.append(table.getColumnName(i)).append("\t"); } header.append("\n"); if (outfile != null){ checkWriter.write(header.toString()); }else{ System.out.print(header.toString()); } for (int i = 0; i < table.getRowCount(); i++){ StringBuffer sb = new StringBuffer(); //don't print the true/false vals in last column for (int j = 0; j < numCols-1; j++){ sb.append(table.getValueAt(i,j)).append("\t"); } //print BAD if last column is false if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){ sb.append("\n"); }else{ sb.append("BAD\n"); } if (outfile != null){ checkWriter.write(sb.toString()); }else{ System.out.print(sb.toString()); } } if (outfile != null){ checkWriter.close(); } }
header.append(table.getColumnName(i)).append("\t");
header.append(tableModel.getColumnName(i)).append("\t");
for (int i = 0; i < table.getRowCount(); i++){
for (int i = 0; i < tableModel.getRowCount(); i++){
sb.append(table.getValueAt(i,j)).append("\t");
sb.append(tableModel.getValueAt(i,j)).append("\t");
if (((Boolean)table.getValueAt(i, numCols-1)).booleanValue()){
if (((Boolean)tableModel.getValueAt(i, numCols-1)).booleanValue()){
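The four hunks above all redirect printTable from the JTable view to the underlying tableModel, which is what lets the disp == false path print: the model always exists, while the JTable is only created for display. A sketch of the same idea written against the plain javax.swing.table.TableModel interface (dump and model are illustrative names, not from the source):

// Dump any TableModel without ever constructing a JTable; safe in headless runs.
static void dump(javax.swing.table.TableModel model) {
    StringBuffer sb = new StringBuffer();
    for (int col = 0; col < model.getColumnCount(); col++) {
        sb.append(model.getColumnName(col)).append("\t");
    }
    sb.append("\n");
    for (int row = 0; row < model.getRowCount(); row++) {
        for (int col = 0; col < model.getColumnCount(); col++) {
            sb.append(model.getValueAt(row, col)).append("\t");
        }
        sb.append("\n");
    }
    System.out.print(sb.toString());
}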
if (idx < 0) idx = tp.getModel().getChildCount();
if (idx < 0) idx = tp.getModel().getColumnsFolder().getChildCount();
public void actionPerformed(ActionEvent evt) { Selectable invoker = pp.getSelection(); if (invoker instanceof TablePane) { TablePane tp = (TablePane) invoker; int idx = tp.getSelectedColumnIndex(); try { if (idx < 0) idx = tp.getModel().getChildCount(); } catch (ArchitectException e) { idx = 0; } tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false)); } else { JOptionPane.showMessageDialog((JComponent) invoker, "The selected item type is not recognised"); } }
tp.getModel().addChild(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false));
tp.getModel().addColumn(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER, "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false));
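Both hunks above serve one fix: a SQLTable's direct children are folders (Columns, Exported Keys, Imported Keys, per the SQLTable constructor elsewhere in this set), so the fallback index must count the children of the columns folder, and the insert must go through addColumn rather than the raw addChild. A hypothetical combined form (exception handling as in the original elided):

int idx = tp.getSelectedColumnIndex();
// append at the end of the column list, not the end of the child list
if (idx < 0) idx = tp.getModel().getColumnsFolder().getChildCount();
tp.getModel().addColumn(idx, new SQLColumn(tp.getModel(), "new column", Types.INTEGER,
        "Integer", 10, 0, DatabaseMetaData.columnNullable, null, null, null, false));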
public static PlayPenComponentUI createUI(JComponent c) {
public static PlayPenComponentUI createUI(PlayPenComponent c) {
public static PlayPenComponentUI createUI(JComponent c) { logger.debug("Creating new IERelationshipUI for "+c); return new IERelationshipUI(); }
Map paramValues = config.getParamValues(); appForm.setJndiFactory((String)paramValues.get(ApplicationConfig.JNDI_FACTORY)); appForm.setJndiURL((String)paramValues.get(ApplicationConfig.JNDI_URL));
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception{ AccessController.checkAccess(context.getServiceContext(), ACL_EDIT_APPLICATIONS); ApplicationConfig config = context.getApplicationConfig(); ApplicationForm appForm = (ApplicationForm)actionForm; ApplicationType appType = config.getApplicationType(); MetaApplicationConfig metaAppConfig = appType.getModule().getMetaApplicationConfig(); /* populate the form */ appForm.setApplicationId(config.getApplicationId()); appForm.setName(config.getName()); appForm.setType(config.getType()); if(metaAppConfig.isDisplayHost()) appForm.setHost(config.getHost()); if(metaAppConfig.isDisplayPort()) appForm.setPort(String.valueOf(config.getPort())); if(metaAppConfig.isDisplayURL()) appForm.setURL(config.getURL()); if(metaAppConfig.isDisplayUsername()) appForm.setUsername(config.getUsername()); if(metaAppConfig.isDisplayPassword() && config.getPassword() != null && config.getPassword().length()>0) appForm.setPassword(ApplicationForm.FORM_PASSWORD); request.setAttribute(RequestAttributes.META_APP_CONFIG, metaAppConfig); /*set current page for navigation*/ request.setAttribute(RequestAttributes.NAV_CURRENT_PAGE, "Edit Application"); return mapping.findForward(Forwards.SUCCESS); }
if ( img.getWidth() > columnWidth || img.getHeight() > rowHeight ) { img = img.getSubimage( 0, 0, Math.min( img.getWidth(), columnWidth ), Math.min( img.getHeight(), rowHeight ) ); }
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { log.debug( "paintThumbnail entry " + photo.getUid() ); long startTime = System.currentTimeMillis(); long thumbReadyTime = 0; long thumbDrawnTime = 0; long endTime = 0; // Current position in which attributes can be drawn int ypos = starty + rowHeight/2; // Create a transaction which will be used for persistent object operations // during painting (to avoid creating several short-lived transactions) ODMGXAWrapper txw = new ODMGXAWrapper(); Thumbnail thumbnail = null; log.debug( "finding thumb" ); boolean hasThumbnail = photo.hasThumbnail(); log.debug( "asked if has thumb" ); if ( hasThumbnail ) { log.debug( "Photo " + photo.getUid() + " has thumbnail" ); thumbnail = photo.getThumbnail(); log.debug( "got thumbnail" ); } else { thumbnail = Thumbnail.getDefaultThumbnail(); if ( !thumbCreatorThread.isBusy() ) { log.debug( "Create thumbnail for " + photo.getUid() ); thumbCreatorThread.createThumbnail( photo ); log.debug( "Thumbnail request submitted" ); } } thumbReadyTime = System.currentTimeMillis(); log.debug( "starting to draw" ); // Find the position for the thumbnail BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; log.debug( "drawing thumbnail" ); g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); log.debug( "Drawn, drawing decorations" ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } thumbDrawnTime = System.currentTimeMillis(); // Increase ypos so that attributes are drawn under the image ypos += ((int)img.getHeight())/2 + 4; // Draw the attributes // Draw the quality icon to the upper left corner of the thumbnail int quality = photo.getQuality(); if ( showQuality && quality != PhotoInfo.QUALITY_UNDEFINED ) { ImageIcon qualityIcon = qualityIcons[quality]; int qx = startx + (columnWidth-img.getWidth()-qualityIcon.getIconWidth())/(int)2; int qy = starty + (rowHeight-img.getHeight()-qualityIcon.getIconHeight())/(int)2; qualityIcon.paintIcon( this, g2, qx, qy ); } Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } else { g2.setBackground( this.getBackground() ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { FuzzyDate fd = new FuzzyDate( photo.getShootTime(), photo.getTimeAccuracy() ); String dateStr = fd.format(); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); txw.commit(); endTime = System.currentTimeMillis(); log.debug( "paintThumbnail: exit " + photo.getUid() ); log.debug( "Thumb fetch " + (thumbReadyTime - startTime ) + " ms" ); log.debug( "Thumb draw " + ( thumbDrawnTime - thumbReadyTime ) + " ms" ); log.debug( "Decoration draw " + (endTime - thumbDrawnTime ) + " ms" ); log.debug( "Total " + (endTime - startTime ) + " ms" ); }
public void tallyCCInd(byte a1, byte a2, int cc){
public void tallyCCInd(byte[] a, int cc){ if (cc == 2) cc = 0; byte a1 = a[0]; byte a2 = a[1];
public void tallyCCInd(byte a1, byte a2, int cc){ //case = 0, control = 1 for int cc if (a1 >= 5 && a2 >= 5){ counts[cc][0]++; counts[cc][1]++; if (allele1 == 0){ allele1 = (byte)(a1 - 4); allele2 = (byte)(a2 - 4); } }else{ //seed the alleles as soon as they're found if (allele1 == 0){ allele1 = a1; if (a1 != a2){ allele2 = a2; } }else if (allele2 == 0){ if (a1 != allele1){ allele2 = a1; }else if (a2 != allele1){ allele2 = a2; } } if (a1 != 0){ if (a1 == allele1){ counts[cc][0] ++; }else{ counts[cc][1] ++; } } if (a2 != 0){ if (a2 == allele1){ counts[cc][0]++; }else{ counts[cc][1]++; } } } }
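The new signature packs the two alleles into a byte array and normalizes the case/control code on entry: an incoming affection status of 2 is folded to 0, matching the method's case = 0, control = 1 convention. A hypothetical call-site migration (marker, a1, a2, and cc are illustrative names):

// before: marker.tallyCCInd(a1, a2, cc);
marker.tallyCCInd(new byte[]{a1, a2}, cc); // cc == 2 is remapped to 0 (case) inside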
logger.debug("NEW TABLE "+name+"@"+hashCode());
public SQLTable(SQLDatabase parentDb, SQLObject parent, SQLCatalog catalog, SQLSchema schema, String name, String remarks, String objectType) { this.parentDatabase = parentDb; this.parent = parent; this.catalog = catalog; this.schema = schema; this.tableName = name; this.remarks = remarks; this.columnsPopulated = false; this.relationshipsPopulated = false; this.objectType = objectType; this.columns = new ArrayList(); this.importedKeys = new ArrayList(); this.exportedKeys = new ArrayList(); this.children = new ArrayList(); children.add(columnsFolder = new Folder("Columns", this, columns)); children.add(exportedKeysFolder = new Folder("Exported Keys", this, exportedKeys)); children.add(importedKeysFolder = new Folder("Imported Keys", this, importedKeys)); }
new String[] {"SYSTEM TABLE", "TABLE", "VIEW"});
new String[] {"TABLE", "VIEW"});
protected static void addTablesToDatabase(SQLDatabase addTo) throws SQLException, ArchitectException { HashMap catalogs = new HashMap(); HashMap schemas = new HashMap(); synchronized (addTo) { Connection con = addTo.getConnection(); DatabaseMetaData dbmd = con.getMetaData(); ResultSet mdTables = null; try { mdTables = dbmd.getTables(null, null, "%", new String[] {"SYSTEM TABLE", "TABLE", "VIEW"}); while (mdTables.next()) { SQLObject tableParent = addTo; String catName = mdTables.getString(1); SQLCatalog cat = null; if (catName != null) { cat = (SQLCatalog) catalogs.get(catName); if (cat == null) { cat = new SQLCatalog(addTo, catName); addTo.children.add(cat); catalogs.put(catName, cat); } tableParent = cat; } String schName = mdTables.getString(2); SQLSchema schema = null; if (schName != null) { schema = (SQLSchema) schemas.get(catName+"."+schName); if (schema == null) { if (cat == null) { schema = new SQLSchema(addTo, schName); addTo.children.add(schema); } else { schema = new SQLSchema(cat, schName); cat.children.add(schema); } schemas.put(catName+"."+schName, schema); } tableParent = schema; } tableParent.children.add(new SQLTable(addTo, tableParent, cat, schema, mdTables.getString(3), mdTables.getString(5), mdTables.getString(4) )); } } finally { if (mdTables != null) mdTables.close(); } } }
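The String array passed to DatabaseMetaData.getTables filters the result by table type, so dropping "SYSTEM TABLE" stops system tables from being imported; only user tables and views come back:

// JDBC filters by the types array; system tables are no longer requested.
ResultSet mdTables = dbmd.getTables(null, null, "%", new String[] {"TABLE", "VIEW"});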
if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue());
if (bar != null) { if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false);
public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } if (label != null) { label.setVisible(true); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { logger.debug("all done, terminating timer thread..."); try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setVisible(false); } bar.setVisible(false); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } }
bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false);
if (pm != null) { if (jobSize != null) { pm.setMaximum(jobSize.intValue()); } pm.setProgress(monitorable.getProgress()); logger.debug("progress: " + monitorable.getProgress()); pm.setNote(monitorable.getMessage()); }
logger.debug("all done, terminating timer thread...");
bar.setVisible(false);
if (bar != null) { bar.setVisible(false); } if (pm != null) { logger.debug("pm done, max was: " + pm.getMaximum()); }
if (seq.equals(((TagSequence)o1).getSequence())){ return 1; }else if (seq.equals(((TagSequence)o2).getSequence())){ return -1; }
public int compare(Object o1, Object o2) { if(getPairwiseCompRsq(seq,((TagSequence)o1).getSequence()) == getPairwiseCompRsq(seq,((TagSequence)o2).getSequence())) { return 0; } else if (getPairwiseCompRsq(seq,((TagSequence)o1).getSequence()) > getPairwiseCompRsq(seq,((TagSequence)o2).getSequence())) { return 1; } else { return -1; } }
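The removed branch special-cased tags whose sequence is seq itself; without it, the ordering rests purely on the pairwise r² each TagSequence shows with seq. A hedged usage sketch (tagSeqs, getCandidateTags, and byRsqWithSeq are illustrative names, not from the source):

Vector tagSeqs = getCandidateTags();                     // hypothetical source of TagSequence objects
Collections.sort(tagSeqs, byRsqWithSeq);                 // ascending r^2 with seq, per the Comparator above
TagSequence best = (TagSequence) tagSeqs.lastElement();  // strongest proxy for seq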
this(s,include,exclude,ac,DEFAULT_RSQ_CUTOFF,AGGRESSIVE_TRIPLE, DEFAULT_MAXDIST, DEFAULT_MAXNUMTAGS);
this(s,include,exclude,ac,DEFAULT_RSQ_CUTOFF,AGGRESSIVE_TRIPLE, DEFAULT_MAXDIST, DEFAULT_MAXNUMTAGS,true);
public Tagger(Vector s, Vector include, Vector exclude, AlleleCorrelator ac){ this(s,include,exclude,ac,DEFAULT_RSQ_CUTOFF,AGGRESSIVE_TRIPLE, DEFAULT_MAXDIST, DEFAULT_MAXNUMTAGS); }
pt.removeTagged(theTag.sequence);
private HashSet addTag(PotentialTag theTag,Hashtable potentialTagHash, Vector sitesToCapture) { Vector potentialTags = new Vector(potentialTagHash.values()); potentialTags.remove(theTag); potentialTagHash.remove(theTag.sequence); //newlyTagged contains alleles which were not tagged by anything in the set of tags before, //and are now tagged by theTag. HashSet newlyTagged = ((PotentialTag)theTag).tagged; TagSequence tagSeq = new TagSequence(theTag.sequence); tags.add(tagSeq); Iterator itr = potentialTagHash.keySet().iterator(); Vector toRemove = new Vector(); //iterate through the list of available tags, and remove the newly tagged alleles from //the list of alleles that each PotentialTag can tag. (since we want to choose our next tag // according to which will tag the most untagged alleles ) while(itr.hasNext()) { PotentialTag pt = (PotentialTag) potentialTagHash.get(itr.next()); pt.removeTagged(newlyTagged); pt.removeTagged(theTag.sequence); //if a PotentialTag cannot tag any other uncaptured sites, then we want to remove it from contention, //unless its sequence still needs to be captured. if(pt.taggedCount() == 0 && !sitesToCapture.contains(pt.sequence)) { toRemove.add(pt.sequence); } } for(int i=0;i<toRemove.size();i++) { potentialTags.remove(potentialTagHash.remove(toRemove.get(i))); } //loop through the list of alleles the newly added tag can capture, and //add them to the TagSequence object. //we add all the alleles the tag can capture, _not_ just the newly tagged alleles. Iterator ptitr = theTag.allTagged.iterator(); while(ptitr.hasNext()) { tagSeq.addTagged((VariantSequence)ptitr.next()); } return newlyTagged; }
if(pt.taggedCount() == 0 && !sitesToCapture.contains(pt.sequence)) {
if(pt.taggedCount() == 0){
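addTag above is one round of a greedy cover: commit the chosen tag, subtract what it captures from every remaining candidate, and drop candidates that can no longer capture anything (the hunk above loosens that last test). The driver loop is not shown in these rows; the sketch below is an assumed shape of such a loop, not the project's actual code:

import java.util.*;

// Schematic greedy cover (illustrative only): repeatedly take the candidate
// that captures the most still-uncaptured sites, as addTag assumes its caller does.
static List greedyCover(Map candidateToSites, Set toCapture) {
    List picked = new ArrayList();
    while (!toCapture.isEmpty()) {
        Object best = null; int bestCount = 0;
        for (Iterator it = candidateToSites.entrySet().iterator(); it.hasNext();) {
            Map.Entry e = (Map.Entry) it.next();
            Set covered = new HashSet((Set) e.getValue());
            covered.retainAll(toCapture);            // what this candidate still adds
            if (covered.size() > bestCount) { bestCount = covered.size(); best = e.getKey(); }
        }
        if (best == null) break;                     // nothing left can capture anything
        picked.add(best);
        toCapture.removeAll((Set) candidateToSites.remove(best));
    }
    return picked;
}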
return createTime;
return createTime != null ? (Date) createTime.clone() : null;
public Date getCreateTime() { return createTime; }
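Returning the stored Date directly hands callers a live reference to mutable internal state; the replacement returns a clone (or null). The matching setter usually takes the same precaution; a minimal sketch of the full defensive-copy pattern (the setter is an assumed companion, not shown in this row):

private Date createTime;

public Date getCreateTime() {
    // give out a copy so callers cannot mutate our state through the Date
    return createTime != null ? (Date) createTime.clone() : null;
}

public void setCreateTime(Date createTime) {
    // store a copy for the same reason (hypothetical companion change)
    this.createTime = (createTime != null) ? (Date) createTime.clone() : null;
}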
public void run(Context context, XMLOutput output) throws Exception {
public void run(JellyContext context, XMLOutput output) throws Exception {
public void run(Context context, XMLOutput output) throws Exception { for ( int i = 0, size = scripts.length; i < size; i++ ) { Script script = scripts[i]; script.run( context, output ); } }
Enumeration enum = props.propertyNames(); while (enum.hasMoreElements()) { String key = (String) enum.nextElement();
Enumeration propsEnum = props.propertyNames(); while (propsEnum.hasMoreElements()) { String key = (String) propsEnum.nextElement();
public void doTag(final XMLOutput output) throws JellyTagException { if (file == null && uri == null) { throw new JellyTagException("This tag must define a 'file' or 'uri' attribute"); } InputStream is = null; if (file != null) { File f = new File(file); if (!f.exists()) { throw new JellyTagException("file: " + file + " does not exist!"); } try { is = new FileInputStream(f); } catch (FileNotFoundException e) { throw new JellyTagException(e); } } else { is = context.getResourceAsStream(uri); if (is == null) { throw new JellyTagException( "Could not find: " + uri ); } } Properties props = new Properties(); try { props.load(is); } catch (IOException e) { throw new JellyTagException("properties tag could not load from file",e); } if (var != null) { context.setVariable(var, props); } else { Enumeration enum = props.propertyNames(); while (enum.hasMoreElements()) { String key = (String) enum.nextElement(); String value = props.getProperty(key); // @todo we should parse the value in case its an Expression context.setVariable(key, value); } }
}
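The rename above is forced, not stylistic: enum became a reserved word in Java 5, so Enumeration enum = ... no longer compiles; any other identifier works:

Enumeration propsEnum = props.propertyNames(); // 'enum' is a keyword from Java 5 on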
Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. //todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
else if (infile != null){ int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0;
Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." + numdups; } names.setElementAt(newName,i); dupNames = true;
} } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } if(needSort) { Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } }
numdups++; } } } int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } if(needSort){ Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (names.contains(name)){ dupCheck.add(name); } names.add(name); positions.add(l); extras.add(extra); } //check for duplicate names Iterator ditr = dupCheck.iterator(); while (ditr.hasNext()){ String n = (String) ditr.next(); int numdups = 1; for (int i = 0; i < names.size(); i++){ if (names.get(i).equals(n)){ //leave the first instance of the duplicate name the same if (numdups > 1){ String newName = n + "." + numdups; while (names.contains(newName)){ numdups++; newName = n + "." 
+ numdups; } names.setElementAt(newName,i); dupNames = true; } numdups++; } } } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.elementAt(realPos[j])); } ind.setMarkers(sortedMarkers); } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Math.rint(mr.getMAF()*100.0)/100.0; }else{ maf = Math.rint(100.0*(numa2/(numa1+numa2)))/100.0; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
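The sorting pass above never moves the heavyweight data while comparing: it sorts lightweight (position, original index) pairs, then replays the resulting permutation (realPos) over every parallel structure: names, positions, extras, each chromosome's genotype array, and the pedfile results. A minimal sketch of that permutation pattern, with hypothetical marker data (everything here is illustrative, not the Haploview API):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class PermutationSortSketch {
    // Mirrors SortingHelper: pair each position with its original index.
    static class Entry implements Comparable<Entry> {
        final long pos;
        final int orderInFile;
        Entry(long pos, int orderInFile) { this.pos = pos; this.orderInFile = orderInFile; }
        public int compareTo(Entry o) { return Long.compare(pos, o.pos); }
    }

    public static void main(String[] args) {
        long[] positions = {300, 100, 200};       // hypothetical marker positions
        String[] names = {"rs3", "rs1", "rs2"};   // a parallel array to reorder

        List<Entry> entries = new ArrayList<Entry>();
        for (int i = 0; i < positions.length; i++) {
            entries.add(new Entry(positions[i], i));
        }
        Collections.sort(entries);                // sort the lightweight pairs only

        // realPos[i] = original index of the marker that belongs at sorted slot i.
        int[] realPos = new int[entries.size()];
        String[] sortedNames = new String[names.length];
        for (int i = 0; i < entries.size(); i++) {
            realPos[i] = entries.get(i).orderInFile;
            sortedNames[i] = names[realPos[i]];
        }
        // sortedNames is {"rs1", "rs2", "rs3"}; the same realPos permutation can be
        // replayed over any other parallel structure (genotypes, pedfile results).
        System.out.println(java.util.Arrays.toString(sortedNames));
    }
}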
SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; }
SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0;
public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } }
}
public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } }
ServerConnection serverConnection = context.getServerConnection();
ServerConnection serverConnection = ServiceUtils.getServerConnectionEvenIfCluster( context.getApplicationConfig());
public ObjectInfo getMBeanInfo(ServiceContext context) throws ServiceException { canAccessThisMBean(context); ServerConnection serverConnection = context.getServerConnection(); ObjectInfo objectInfo = serverConnection.getObjectInfo(context.getObjectName()); return objectInfo; }
exportedKeys.add(r);
exportedKeysFolder.addChild(r);
public void addExportedKey(SQLRelationship r) { exportedKeys.add(r); }
importedKeys.add(r);
importedKeysFolder.addChild(r);
public void addImportedKey(SQLRelationship r) { importedKeys.add(r); }
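The switch from exportedKeys.add(r) / importedKeys.add(r) to exportedKeysFolder.addChild(r) / importedKeysFolder.addChild(r) routes new relationships through a container object instead of a bare list, plausibly so that listeners attached to the folder are notified of the insertion (the dbChildrenInserted handlers later in this section point the same way). A hedged sketch of such an event-firing container; Folder, ChildListener, and the sample key name are all hypothetical, not the SQLObject API:

import java.util.ArrayList;
import java.util.List;

public class FolderSketch {
    interface ChildListener<T> { void childInserted(T child); }

    static class Folder<T> {
        private final List<T> children = new ArrayList<T>();
        private final List<ChildListener<T>> listeners = new ArrayList<ChildListener<T>>();
        void addListener(ChildListener<T> l) { listeners.add(l); }
        void addChild(T child) {
            children.add(child);            // same effect as the old list.add(r)...
            for (ChildListener<T> l : listeners) {
                l.childInserted(child);     // ...plus notification of the model
            }
        }
    }

    public static void main(String[] args) {
        Folder<String> exportedKeysFolder = new Folder<String>();
        exportedKeysFolder.addListener(new ChildListener<String>() {
            public void childInserted(String child) {
                System.out.println("child inserted: " + child);
            }
        });
        exportedKeysFolder.addChild("FK_order_customer");  // hypothetical relationship name
    }
}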
populate();
public synchronized List getColumns() throws ArchitectException { return columnsFolder.getChildren(); }
r.getPkTable().removeExportedKey(r); logger.debug(r); } it = exportedKeys.iterator(); while (it.hasNext()) { SQLRelationship r = (SQLRelationship) it.next(); r.getFkTable().removeImportedKey(r);
public void removeDependencies() { Iterator it = importedKeys.iterator(); while (it.hasNext()) { SQLRelationship r = (SQLRelationship) it.next(); // FIXME: have to actually remove relationships logger.debug(r); } }
svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) );
svnbranchLabel.setText( prop.getProperty( "svn.url", "unknown" ) );
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" );
String versionTag = prop.getProperty( "build.version_tag", "" ); if ( versionTag.length() > 0 ) { version = version + " (" + versionTag + ")"; } versionLabel.setText( version );
void setupInfo() { URL buildPropertyURL = AboutDlg.class.getClassLoader().getResource( "buildinfo.properties"); Properties prop = new Properties(); try { InputStream is = buildPropertyURL.openStream(); prop.load( is ); } catch (IOException e ) { // Cannot read the properties for some reason. // Do nothing, use the default values instead. } svnrevLabel.setText( prop.getProperty( "svn.revision", "unknown" ) ); svnbranchLabel.setText( prop.getProperty( "svn.info.url", "unknown" ) ); buildTimeLabel.setText( prop.getProperty( "build.time", "unknown" ) ); builderLabel.setText( prop.getProperty( "build.user", "unknown" ) ); String version = prop.getProperty( "build.version", "unknown" ); String versionTag = prop.getProperty( "build.version_tag", "unknown" ); versionLabel.setText( version + " (" + versionTag + ")" ); // Set up the splash screen image URL splashImageURL = AboutDlg.class.getClassLoader().getResource( "splash.jpg" ); Icon splash = new ImageIcon( splashImageURL ); splashLabel.setIcon( splash ); splashLabel.setText( null ); // Set the copyright text URL copyrightTextURL = AboutDlg.class.getClassLoader().getResource( "copyright.html" ); try { copyrightTextPane.setPage( copyrightTextURL ); } catch ( IOException e ) {} }
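setupInfo follows the standard java.util.Properties idiom: load key=value pairs from a classpath resource and supply a default on every lookup, so a missing or unreadable buildinfo.properties degrades to "unknown" labels instead of failing. The idiom in isolation (resource name and keys taken from the method above; the properties file itself is assumed to be on the classpath):

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class BuildInfoSketch {
    public static void main(String[] args) {
        Properties prop = new Properties();
        InputStream is = BuildInfoSketch.class.getClassLoader()
                .getResourceAsStream("buildinfo.properties");
        if (is != null) {
            try {
                prop.load(is);   // plain key=value pairs
            } catch (IOException e) {
                // fall through: the getProperty defaults below still apply
            }
        }
        // Every lookup supplies a default, so an absent file is harmless.
        System.out.println(prop.getProperty("build.version", "unknown"));
        System.out.println(prop.getProperty("svn.revision", "unknown"));
    }
}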
lastSaveOpSuccessful = true;
lastSaveOpSuccessful = true; JOptionPane.showMessageDialog(ArchitectFrame.this, "Save successful");
public boolean saveOrSaveAs(boolean showChooser, boolean separateThread) { if (project.getFile() == null || showChooser) { JFileChooser chooser = new JFileChooser(project.getFile()); chooser.addChoosableFileFilter(ASUtils.ARCHITECT_FILE_FILTER); int response = chooser.showSaveDialog(ArchitectFrame.this); if (response != JFileChooser.APPROVE_OPTION) { return false; } else { File file = chooser.getSelectedFile(); if (!file.getPath().endsWith(".architect")) { file = new File(file.getPath()+".architect"); } if (file.exists()) { response = JOptionPane.showConfirmDialog( ArchitectFrame.this, "The file\n\n"+file.getPath()+"\n\nalready exists. Do you want to overwrite it?", "File Exists", JOptionPane.YES_NO_OPTION); if (response == JOptionPane.NO_OPTION) { return saveOrSaveAs(true, separateThread); } } project.setFile(file); String projName = file.getName().substring(0, file.getName().length()-".architect".length()); project.setName(projName); setTitle(projName); } } final boolean finalSeparateThread = separateThread; final ProgressMonitor pm = new ProgressMonitor (ArchitectFrame.this, "Saving Project", "", 0, 100); Runnable saveTask = new Runnable() { public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } } }; if (separateThread) { new Thread(saveTask).start(); return true; // this is an optimistic lie } else { saveTask.run(); return lastSaveOpSuccessful; } }
lastSaveOpSuccessful = true;
lastSaveOpSuccessful = true; JOptionPane.showMessageDialog(ArchitectFrame.this, "Save successful");
public void run() { try { lastSaveOpSuccessful = false; project.setSaveInProgress(true); project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } finally { project.setSaveInProgress(false); } }
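One caveat with the added JOptionPane.showMessageDialog call: when separateThread is true, run() executes on a background thread, and Swing components should only be touched on the event dispatch thread. The usual remedy is to post the dialog back with SwingUtilities.invokeLater; a minimal sketch of that pattern (the save work itself is elided):

import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;

public class SaveNotifySketch {
    public static void main(String[] args) {
        Runnable saveTask = new Runnable() {
            public void run() {
                // ... perform the (possibly long) save off the EDT ...
                // Then hop back onto the event dispatch thread for any UI work:
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        JOptionPane.showMessageDialog(null, "Save successful");
                    }
                });
            }
        };
        new Thread(saveTask).start();
    }
}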
public void run(Context context, XMLOutput output) throws Exception { for ( int i = 0, size = expressions.length; i < size; i++ ) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; Object value = null; if ( type.isAssignableFrom( Expression.class ) ) { value = expression; } else { value = expression.evaluate( context ); } if ( value != null ) { value = convertType( value, type ); } Object[] arguments = { value }; method.invoke( tag, arguments ); } tag.run( context, output ); }
public void run(JellyContext context, XMLOutput output) throws Exception { for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; method.invoke(tag, arguments); } tag.setContext(context); tag.run(context, output); }
public void run(Context context, XMLOutput output) throws Exception { // initialize all the properties of the tag before its used // if there is a problem abort this tag for ( int i = 0, size = expressions.length; i < size; i++ ) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if ( type.isAssignableFrom( Expression.class ) ) { value = expression; } else { value = expression.evaluate( context ); } // convert value to correct type if ( value != null ) { value = convertType( value, type ); } Object[] arguments = { value }; method.invoke( tag, arguments ); } tag.run( context, output ); }
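The loop above is a precomputed-reflection pattern: the setter Method, parameter Class, and Expression for each attribute were resolved once when the tag script was built, so the per-run cost is just evaluate, convert, and Method.invoke. The same mechanism in isolation, with a hypothetical Greeter bean standing in for the tag and a string literal standing in for expression.evaluate(context):

import java.lang.reflect.Method;

public class ReflectiveSetterSketch {
    public static class Greeter {
        private String name;
        public void setName(String name) { this.name = name; }
        public String greet() { return "hello " + name; }
    }

    public static void main(String[] args) throws Exception {
        Greeter tag = new Greeter();
        // Resolved once, reused on every run, mirroring the methods[] array above.
        Method setter = Greeter.class.getMethod("setName", String.class);
        Object value = "world";               // stands in for expression.evaluate(context)
        setter.invoke(tag, new Object[] { value });
        System.out.println(tag.greet());      // prints "hello world"
    }
}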
logger.debug("SQLObject children got inserted: "+e); SQLObject o = e.getSQLSource(); SQLObject[] c = e.getChildren(); for (int i = 0; i < c.length; i++) { if (c[i] instanceof SQLTable) { c[i].addSQLObjectListener(this); } }
public void dbChildrenInserted(SQLObjectEvent e) { firePropertyChange("model.children", null, null); revalidate(); }
logger.debug("SQLObject children got removed: "+e); SQLObject o = e.getSQLSource(); SQLObject[] c = e.getChildren(); for (int i = 0; i < c.length; i++) { if (c[i] instanceof SQLTable) { c[i].removeSQLObjectListener(this); for (int j = 0; j < getComponentCount(); j++) { TablePane tp = (TablePane) getComponent(j); if (tp.getModel() == c[i]) { remove(j); } } } }
public void dbChildrenRemoved(SQLObjectEvent e) { firePropertyChange("model.children", null, null); revalidate(); }
firePropertyChange("model.children", null, null); revalidate();
throw new UnsupportedOperationException ("FIXME: we have to make sure we're listening to the right objects now!");
public void dbStructureChanged(SQLObjectEvent e) { firePropertyChange("model.children", null, null); revalidate(); }
tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1); if (tagScript != null) { if (textBuffer.length() > 0) { script.addScript(new TextScript(textBuffer.toString())); textBuffer.setLength(0); } script = (ScriptBlock) scriptStack.pop();
try { tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1); if (tagScript != null) { if (textBuffer.length() > 0) { addTextScript(textBuffer.toString()); textBuffer.setLength(0); } script = (ScriptBlock) scriptStack.pop(); } else { textBuffer.append("</"); textBuffer.append(qName); textBuffer.append(">"); } int size = tagStack.size(); if ( size <= 0 ) { parentTag = null; } else { parentTag = (Tag) tagStack.remove( size - 1 ); }
public void endElement(String namespaceURI, String localName, String qName) throws SAXException { tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1); if (tagScript != null) { if (textBuffer.length() > 0) { script.addScript(new TextScript(textBuffer.toString())); textBuffer.setLength(0); } script = (ScriptBlock) scriptStack.pop(); } else { textBuffer.append("</"); textBuffer.append(qName); textBuffer.append(">"); } int size = tagStack.size(); if ( size <= 0 ) { parentTag = null; } else { parentTag = (Tag) tagStack.remove( size - 1 ); } }
else { textBuffer.append("</"); textBuffer.append(qName); textBuffer.append(">");
catch (SAXException e) { throw e;
public void endElement(String namespaceURI, String localName, String qName) throws SAXException { tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1); if (tagScript != null) { if (textBuffer.length() > 0) { script.addScript(new TextScript(textBuffer.toString())); textBuffer.setLength(0); } script = (ScriptBlock) scriptStack.pop(); } else { textBuffer.append("</"); textBuffer.append(qName); textBuffer.append(">"); } int size = tagStack.size(); if ( size <= 0 ) { parentTag = null; } else { parentTag = (Tag) tagStack.remove( size - 1 ); } }
int size = tagStack.size(); if ( size <= 0 ) { parentTag = null; } else { parentTag = (Tag) tagStack.remove( size - 1 );
catch (Exception e) { log.error( "Caught exception: " + e, e ); throw new SAXException( "Runtime Exception: " + e, e );
public void endElement(String namespaceURI, String localName, String qName) throws SAXException { tagScript = (TagScript) tagScriptStack.remove(tagScriptStack.size() - 1); if (tagScript != null) { if (textBuffer.length() > 0) { script.addScript(new TextScript(textBuffer.toString())); textBuffer.setLength(0); } script = (ScriptBlock) scriptStack.pop(); } else { textBuffer.append("</"); textBuffer.append(qName); textBuffer.append(">"); } int size = tagStack.size(); if ( size <= 0 ) { parentTag = null; } else { parentTag = (Tag) tagStack.remove( size - 1 ); } }
script.addScript(new TextScript(textBuffer.toString()));
addTextScript(textBuffer.toString());
public void startElement( String namespaceURI, String localName, String qName, Attributes list) throws SAXException { try { // if this is a tag then create a script to run it // otherwise pass the text to the current body tagScript = createTag(namespaceURI, localName, list); if (tagScript == null) { tagScript = createStaticTag(namespaceURI, localName, qName, list); } tagScriptStack.add(tagScript); if (tagScript != null) { // set parent relationship... Tag tag = tagScript.getTag(); tag.setParent(parentTag); // pop another tag onto the stack if ( parentTag != null ) { tagStack.add( parentTag ); } parentTag = tag; if (textBuffer.length() > 0) { script.addScript(new TextScript(textBuffer.toString())); textBuffer.setLength(0); } script.addScript(tagScript); // start a new body scriptStack.push(script); script = new ScriptBlock(); tag.setBody(script); } else { // XXXX: might wanna handle empty elements later... textBuffer.append("<"); textBuffer.append(qName); int size = list.getLength(); for (int i = 0; i < size; i++) { textBuffer.append(" "); textBuffer.append(list.getQName(i)); textBuffer.append("="); textBuffer.append("\""); textBuffer.append(list.getValue(i)); textBuffer.append("\""); } textBuffer.append(">"); } } catch (SAXException e) { throw e; } catch (Exception e) { log.error( "Caught exception: " + e, e ); throw new SAXException( "Runtime Exception: " + e, e ); } }
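Both SAX handlers above follow the classic stack discipline: startElement pushes the enclosing state (parent tag, current script block) and endElement pops it, so element nesting in the XML maps directly onto nesting in the tree being built. A stripped-down DefaultHandler showing only that discipline, with no Jelly types:

import java.io.ByteArrayInputStream;
import java.util.ArrayDeque;
import java.util.Deque;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;

public class SaxStackSketch extends DefaultHandler {
    private final Deque<String> stack = new ArrayDeque<String>();

    public void startElement(String uri, String local, String qName, Attributes atts) {
        stack.push(qName);                   // remember the enclosing element
        System.out.println("enter " + stack);
    }

    public void endElement(String uri, String local, String qName) {
        stack.pop();                         // restore the parent's state
        System.out.println("leave " + qName);
    }

    public static void main(String[] args) throws Exception {
        String xml = "<a><b><c/></b></a>";
        SAXParserFactory.newInstance().newSAXParser()
                .parse(new ByteArrayInputStream(xml.getBytes("UTF-8")), new SaxStackSketch());
    }
}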
inputArray[3] = null;
public static void main(String[] args) { //this parses the command line arguments. if nogui mode is specified, //then haploText will execute whatever the user specified HaploText argParser = new HaploText(args); //if nogui is specified, then HaploText has already executed everything, and let Main() return //otherwise, we want to actually load and run the gui if(!argParser.isNogui()) { try { UIManager.put("EditorPane.selectionBackground",Color.lightGray); UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } window = new HaploView(); //setup view object window.setTitle(TITLE_STRING); window.setSize(1024,768); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); if (Constants.BETA_VERSION > 0){ UpdateChecker betaUc; betaUc = new UpdateChecker(); try { betaUc.checkForUpdate(); } catch(IOException ioe) { //this means we couldnt connect but we want it to die quietly } if (betaUc.isNewVersionAvailable()){ UpdateDisplayDialog betaUdp = new UpdateDisplayDialog(window,"Update Check",betaUc); betaUdp.pack(); betaUdp.setVisible(true); } } final SwingWorker worker = showUpdatePanel(); worker.start(); //parse command line stuff for input files or prompt data dialog String[] inputArray = new String[3]; if (argParser.getHapsFileName() != null){ inputArray[0] = argParser.getHapsFileName(); inputArray[1] = argParser.getInfoFileName(); inputArray[2] = null; window.readGenotypes(inputArray, HAPS_FILE, false); }else if (argParser.getPedFileName() != null){ inputArray[0] = argParser.getPedFileName(); inputArray[1] = argParser.getInfoFileName(); inputArray[2] = null; window.readGenotypes(inputArray, PED_FILE, false); }else if (argParser.getHapmapFileName() != null){ inputArray[0] = argParser.getHapmapFileName(); inputArray[1] = null; inputArray[2] = null; window.readGenotypes(inputArray, HMP_FILE, false); }else if (argParser.getPlinkFileName() != null){ inputArray[0] = argParser.getPlinkFileName(); inputArray[1] = argParser.getMapFileName(); inputArray[2] = null; window.readWGA(inputArray); }else{ ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } } }
return context.getVariable( (String) key );
return context.findVariable( (String) key );
public Object get(Object key) { return context.getVariable( (String) key ); }
*/ return new Dimension(lrh.left + lrh.right + i.left + i.right + hgap, lrh.height + i.top + i.bottom + (vgap*(rows)));
public Dimension minimumLayoutSize(Container parent) { LeftRightHeight lrh = calcSizes(parent); Insets i = parent.getInsets(); int rows = parent.getComponentCount() / 2; return new Dimension(lrh.left + lrh.right + i.left + i.right + hgap, lrh.height + i.top + i.bottom + (vgap*(rows-1))); }
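The corrected term is a fence-post count: a column of rows components has only rows-1 internal gaps, so the vertical spacing contribution is vgap*(rows-1), as in the context above, not vgap*rows as in the removed line. For example, 3 rows with vgap = 5 need 2*5 = 10 pixels of inter-row space (plus insets); the old expression reserved one gap too many.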
formattedValue = value.toString();
formattedValue = pctFormat.format(value); } } else if (column == 14 ) { if (value == null) { formattedValue = "null"; } else { formattedValue = aldf.format(value);
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { String formattedValue = new String(); if ( column < 5) { if (value == null) { formattedValue = "null"; } else { formattedValue = ((SQLObject)value).getName(); } }else if (column == 5) { DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); formattedValue =df.format(new Date((Long) value)); }else if (column == 9 || column == 11) { if (value == null) { formattedValue = "N/A"; } else { formattedValue = value.toString(); } } else { if (value == null) { formattedValue = "null"; } else { formattedValue = value.toString(); } } return super.getTableCellRendererComponent(table,formattedValue,isSelected,hasFocus,row,column); }
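Worth noting in this renderer: it builds a new SimpleDateFormat on every getTableCellRendererComponent call, which runs once per visible cell per repaint. Hoisting the formats into fields, as the added pctFormat and aldf names suggest was done, is the usual fix. A hedged sketch with hypothetical field names and column choices:

import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Date;

public class RendererFormatSketch {
    // Created once and reused for every cell, not rebuilt per repaint.
    private final DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
    private final NumberFormat pctFormat = NumberFormat.getPercentInstance(); // hypothetical

    String format(Object value, int column) {
        if (column == 5) return df.format(new Date((Long) value));
        if (column == 9) return pctFormat.format(value);
        return String.valueOf(value);
    }

    public static void main(String[] args) {
        System.out.println(new RendererFormatSketch().format(0L, 5));
    }
}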
boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor);
ArrayList<int[]> paths = (ArrayList<int[]>) t.getTransferData(importFlavor);
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
Iterator removeIt = paths.iterator(); ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); }
ArrayList<SQLObject> droppedItems = new ArrayList<SQLObject>(); for (int[] path : paths) { droppedItems.add(dbtree.getNodeForDnDPath(path));
public void drop(DropTargetDropEvent dtde) { PlayPen pp = tp.getPlayPen(); Point loc = pp.unzoomPoint(new Point(dtde.getLocation())); loc.x -= tp.getX(); loc.y -= tp.getY(); logger.debug("Drop target drop event on "+tp.getName()+": "+dtde); Transferable t = dtde.getTransferable(); DataFlavor importFlavor = bestImportFlavor(pp, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); tp.setInsertionPoint(COLUMN_INDEX_NONE); } else { try { DBTree dbtree = ArchitectFrame.getMainInstance().dbTree; // XXX: bad int insertionPoint = tp.pointToColumnIndex(loc); boolean newColumnsInPk = false; if (insertionPoint == COLUMN_INDEX_END_OF_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = true; } else if (insertionPoint == COLUMN_INDEX_START_OF_NON_PK) { insertionPoint = tp.getModel().getPkSize(); newColumnsInPk = false; } else if (insertionPoint < 0) { insertionPoint = tp.getModel().getColumns().size(); newColumnsInPk = false; } else if (insertionPoint < tp.getModel().getPkSize()) { newColumnsInPk = true; } ArrayList paths = (ArrayList) t.getTransferData(importFlavor); logger.debug("Importing items from tree: "+paths);// Used to put the undo event adapter into a drag and drop state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_START, "Starting drag and drop")); Iterator removeIt = paths.iterator(); // Create a list so we don't have a comodification error ArrayList removeList = new ArrayList(); while (removeIt.hasNext()) { removeList.add(dbtree.getNodeForDnDPath((int[]) removeIt.next())); } for(int ii = removeList.size()-1; ii > -1; ii--) { Object someData = removeList.get(ii); logger.debug("drop: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { SQLTable table = (SQLTable) someData; if (table.getParentDatabase() == tp.getModel().getParentDatabase()) { // can't import table from target into target!! 
dtde.rejectDrop(); } else { dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, table); dtde.dropComplete(true); } } else if (someData instanceof SQLColumn) { SQLColumn col = (SQLColumn) someData; if (col.getParentTable() == tp.getModel()) { // moving column inside the same table dtde.acceptDrop(DnDConstants.ACTION_MOVE); int oldIndex = col.getParent().getChildren().indexOf(col); if (insertionPoint > oldIndex) { insertionPoint--; } tp.getModel().changeColumnIndex(oldIndex, insertionPoint); dtde.dropComplete(true); } else if (col.getParentTable().getParentDatabase() == tp.getModel().getParentDatabase()) { // moving column within playpen dtde.acceptDrop(DnDConstants.ACTION_MOVE); col.getParentTable().removeColumn(col); logger.debug("Moving column '"+col.getName() +"' to table '"+tp.getModel().getName() +"' at position "+insertionPoint); tp.getModel().addColumn(insertionPoint, col); if (newColumnsInPk) { col.setPrimaryKeySeq(new Integer(1)); } else { col.setPrimaryKeySeq(null); } dtde.dropComplete(true); } else { // importing column from a source database dtde.acceptDrop(DnDConstants.ACTION_COPY); tp.getModel().inherit(insertionPoint, col, newColumnsInPk); logger.debug("Inherited "+col.getName()+" to table"); dtde.dropComplete(true); } } else { dtde.rejectDrop(); } } } catch (Exception ex) { // Trying to show this dialog sometimes hangs the app in OS X //JOptionPane.showMessageDialog(tp, "Drop failed: "+ex.getMessage()); logger.error("Error processing drop operation", ex); dtde.rejectDrop(); } finally { tp.setInsertionPoint(COLUMN_INDEX_NONE); tp.getModel().normalizePrimaryKey();// Used to put the undo event adapter into a // regular state ArchitectFrame.getMainInstance().playpen.fireUndoCompoundEvent( new UndoCompoundEvent( this,EventTypes.DRAG_AND_DROP_END, "End drag and drop")); } } }
if (evt.getClickCount() == 2) {
if ((evt.getModifiers() & MouseEvent.BUTTON1_MASK) != 0 && evt.getClickCount() == 2) {
public void mouseClicked(MouseEvent evt) { if (evt.getClickCount() == 2) { TablePane tp = (TablePane) evt.getSource(); if (tp.isSelected()) { ArchitectFrame af = ArchitectFrame.getMainInstance(); int selectedColIndex = tp.getSelectedColumnIndex(); if (selectedColIndex == COLUMN_INDEX_NONE) { af.editTableAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } else if (selectedColIndex >= 0) { af.editColumnAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, "DoubleClick")); } } } }
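The added modifier test restricts the action to a left-button double click; getClickCount() alone also fires on a double right-click. SwingUtilities.isLeftMouseButton is the more idiomatic spelling of the same mask check:

import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.SwingUtilities;

// Sketch: an equivalent left-button double-click test using SwingUtilities.
public class DoubleClickSketch extends MouseAdapter {
    public void mouseClicked(MouseEvent evt) {
        if (SwingUtilities.isLeftMouseButton(evt) && evt.getClickCount() == 2) {
            System.out.println("left double click at " + evt.getPoint());
        }
    }
}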
for (int i = 0; i < st.countTokens()-1; i++){
int numPieces = st.countTokens()-1; for (int i = 0; i < numPieces; i++){
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); for (int i = 0; i < st.countTokens()-1; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
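The loop-bound fix above matters because StringTokenizer.countTokens() reports the tokens remaining, so re-evaluating it in the loop condition after each nextToken() shrinks the bound mid-loop and silently drops pieces of the base name once the file name has enough dots. Caching the count before the loop preserves the intent. A quick demonstration:

import java.util.StringTokenizer;

public class CountTokensSketch {
    public static void main(String[] args) {
        String name = "sample.extra.haps.info";   // base name should be "sample.extra.haps"

        // Buggy form: the bound shrinks as tokens are consumed.
        StringTokenizer st = new StringTokenizer(name, ".");
        String base = st.nextToken();
        for (int i = 0; i < st.countTokens() - 1; i++) {
            base = base + "." + st.nextToken();
        }
        System.out.println(base);   // prints "sample.extra", one piece short

        // Fixed form: cache the remaining-token count once.
        st = new StringTokenizer(name, ".");
        base = st.nextToken();
        int numPieces = st.countTokens() - 1;
        for (int i = 0; i < numPieces; i++) {
            base = base + "." + st.nextToken();
        }
        System.out.println(base);   // prints "sample.extra.haps"
    }
}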
System.out.println(baseName);
void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); for (int i = 0; i < st.countTokens()-1; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }
if (currentInd.getZeroed(i)){ continue; }
private void buildCCSet(PedFile pf, Vector affectedStatus, TreeSet snpsToBeTested){ ArrayList results = new ArrayList(); int numMarkers = Chromosome.getUnfilteredSize(); Vector indList = pf.getUnrelatedIndividuals(); int numInds = indList.size(); if(affectedStatus == null || affectedStatus.size() != indList.size()) { affectedStatus = new Vector(indList.size()); for(int i=0;i<indList.size();i++) { Individual tempInd = ((Individual)indList.get(i)); affectedStatus.add(new Integer(tempInd.getAffectedStatus())); } } boolean[] useable = new boolean[indList.size()]; Arrays.fill(useable, false); //this loop determines who is eligible to be used for the case/control association test for(int i=0;i<useable.length;i++) { Individual tempInd = ((Individual)indList.get(i)); Family tempFam = pf.getFamily(tempInd.getFamilyID()); //need to check to make sure we don't include both parents and kids of trios //so, we only set useable[i] to true if Individual at index i is not the child of a trio in the indList if (!(tempFam.containsMember(tempInd.getMomID()) && tempFam.containsMember(tempInd.getDadID()))){ useable[i] = true; } else{ try{ if (!(indList.contains(tempFam.getMember(tempInd.getMomID())) || indList.contains(tempFam.getMember(tempInd.getDadID())))){ useable[i] = true; } }catch (PedFileException pfe){ } } } for (int i = 0; i < numMarkers; i++){ SNP currentMarker = Chromosome.getUnfilteredMarker(i); if (snpsToBeTested.contains(currentMarker)){ byte allele1 = 0, allele2 = 0; int[][] counts = new int[2][2]; Individual currentInd; for (int j = 0; j < numInds; j++){ if(useable[j]) { currentInd = (Individual)indList.get(j); int cc = ((Integer)affectedStatus.get(j)).intValue(); if (cc == 0) continue; if (cc == 2) cc = 0; byte a1 = currentInd.getAllele(i,0); byte a2 = currentInd.getAllele(i,1); if (a1 >= 5 && a2 >= 5){ counts[cc][0]++; counts[cc][1]++; if (allele1 == 0){ allele1 = (byte)(a1 - 4); allele2 = (byte)(a2 - 4); } }else{ //seed the alleles as soon as they're found if (allele1 == 0){ allele1 = a1; if (a1 != a2){ allele2 = a2; } }else if (allele2 == 0){ if (a1 != allele1){ allele2 = a1; }else if (a2 != allele1){ allele2 = a2; } } if (a1 != 0){ if (a1 == allele1){ counts[cc][0] ++; }else{ counts[cc][1] ++; } } if (currentInd.getGender() == 2 || !Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ if (a2 != 0){ if (a2 == allele1){ counts[cc][0]++; }else{ counts[cc][1]++; } } } } } } int[] g1 = {allele1}; int[] g2 = {allele2}; int[] m = {i}; Haplotype thisSNP1 = new Haplotype(g1, 0, m, null); thisSNP1.setCaseCount(counts[0][0]); thisSNP1.setControlCount(counts[1][0]); Haplotype thisSNP2 = new Haplotype(g2, 0, m, null); thisSNP2.setCaseCount(counts[0][1]); thisSNP2.setControlCount(counts[1][1]); Haplotype[] daBlock = {thisSNP1, thisSNP2}; results.add(new MarkerAssociationResult(daBlock, currentMarker.getDisplayName(), currentMarker)); } } this.results = new Vector(results); }
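The counting in buildCCSet fills a 2-by-2 allele-by-status table: affected status 2 (case) is remapped to row 0, status 1 (control) stays at row 1, status 0 is skipped, and heterozygote codes of 5 and above contribute half a count to each allele column. Given such a table, the textbook single-marker test is a 2x2 chi-square; a sketch of that computation under hypothetical counts (this names the standard statistic, not necessarily what MarkerAssociationResult computes internally):

public class ChiSquareSketch {
    // Pearson chi-square for a 2x2 table:
    // chi2 = N(ad - bc)^2 / ((a+b)(c+d)(a+c)(b+d))
    static double chiSquare2x2(double a, double b, double c, double d) {
        double n = a + b + c + d;
        double num = n * Math.pow(a * d - b * c, 2);
        double denom = (a + b) * (c + d) * (a + c) * (b + d);
        return num / denom;
    }

    public static void main(String[] args) {
        // counts[cc][allele]: rows = case/control, columns = allele1/allele2
        int[][] counts = { {60, 40}, {45, 55} };   // hypothetical allele counts
        System.out.println(chiSquare2x2(counts[0][0], counts[0][1],
                                        counts[1][0], counts[1][1]));
    }
}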
urlString.append(value);
try { urlString.append(URLEncoder.encode(value, "UTF-8")); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); }
public static String appendURLParam(String url, String param, String value){ StringBuffer urlString = new StringBuffer(url); if (url.indexOf("?") == -1) { urlString.append("?"); }else if(url.endsWith("&") == false) { urlString.append("&"); } urlString.append(param); urlString.append("="); urlString.append(value); return urlString.toString(); }
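The encoding fix matters whenever the value carries reserved characters: an unescaped space, '&', or '=' would corrupt the query string for every later parameter. URLEncoder.encode applies application/x-www-form-urlencoded escaping:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class UrlParamSketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String value = "a b&c";
        // Spaces become '+', '&' becomes %26 under x-www-form-urlencoded rules.
        System.out.println(URLEncoder.encode(value, "UTF-8"));   // prints "a+b%26c"
    }
}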
public Tag createTag() {
public Tag createTag(String name, Attributes attributes) {
public void doTag(XMLOutput output) throws Exception { invokeBody(output); if (name == null) { throw new MissingAttributeException("name"); } if (dynaClass == null) { throw new MissingAttributeException("dynaClass"); } final DynaClass theDynaClass = dynaClass; final Map beanAttributes = (attributes != null) ? attributes : EMPTY_MAP; TagFactory factory = new TagFactory() { public Tag createTag() { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); } }; getTagLibrary().registerBeanTag(name, factory); // now lets clear the attributes for next invocation and help the GC attributes = null; }
public Tag createTag() {
public Tag createTag(String name, Attributes attributes) {
public Tag createTag() { return new DynamicDynaBeanTag(theDynaClass, beanAttributes, varAttribute); }
returnStr += dprime;
NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(2); nf.setMaximumFractionDigits(2); returnStr += nf.format(dprime);
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its' LOD, and r^2 - let's store them and then compute confidence intervals */ String 
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
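The NumberFormat change pins the D', LOD, and r^2 columns to two decimal places instead of whatever Double.toString happens to produce. The idiom in isolation (output shown for an English-style locale; NumberFormat.getInstance() is locale-sensitive):

import java.text.NumberFormat;

public class TwoDecimalSketch {
    public static void main(String[] args) {
        NumberFormat nf = NumberFormat.getInstance();
        nf.setMinimumFractionDigits(2);
        nf.setMaximumFractionDigits(2);
        System.out.println(nf.format(0.9));       // "0.90"
        System.out.println(nf.format(0.98765));   // "0.99"
        System.out.println("" + 0.98765);         // "0.98765", the old behavior
    }
}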
returnStr += loglike1-loglike0;
returnStr += nf.format(loglike1-loglike0);
returnStr += r2;
returnStr += nf.format(r2);
public String computeDPrime(int a, int b, int c, int d, int e, double f){ int i,j,k,count,itmp; int low_i = 0; int high_i = 0; double[] nAA = new double[1]; double[] nBB = new double[1]; double[] nAB = new double[1]; double[] nBA = new double[1]; double[] pAA = new double[1]; double[] pBB = new double[1]; double[] pAB = new double[1]; double[] pBA = new double[1]; double loglike, oldloglike, meand, mean2d, sd; double g,h,m,tmp,r; double num, denom1, denom2, denom, dprime, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, r2; double tmpAA, tmpAB, tmpBA, tmpBB, dpr, tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=(double)a; known[AB]=(double)b; known[BA]=(double)c; known[BB]=(double)d; unknownDH=e; total_chroms= a+b+c+d+(2*unknownDH); pA1 = (double) (a+b+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (double) (a+c+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = f; /* set initial conditions */ if (const_prob < 0.00) { pAA[0]=pA1*pA2; pAB[0]=pA1*pB2; pBA[0]=pB1*pA2; pBB[0]=pB1*pB2; } else { pAA[0]=const_prob; pAB[0]=const_prob; pBA[0]=const_prob; pBB[0]=const_prob; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,0); estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(pAA[0],pAB[0],pBA[0],pBB[0],nAA,nAB,nBA,nBB,count); loglike = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(nAA[0],nAB[0],nBA[0],nBB[0],pAA,pAB,pBA,pBB); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(pAA[0]) + known[AB]*log10(pAB[0]) + known[BA]*log10(pBA[0]) + known[BB]*log10(pBB[0]) + (double)unknownDH*log10(pAA[0]*pBB[0] + pAB[0]*pBA[0]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=pAA[0]; pAA[0]=pAB[0]; pAB[0]=tmp; tmp=pBB[0]; pBB[0]=pBA[0]; pBA[0]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=nAA[0]; nAA[0]=nAB[0]; nAB[0]=tmp; tmp=nBB[0]; nBB[0]=nBA[0]; nBA[0]=tmp; /* num has now undergone a sign change */ num = pAA[0]*pBB[0] - pAB[0]*pBA[0]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (pAA[0]+pBA[0])*(pBA[0]+pBB[0]); denom2 = (pAA[0]+pAB[0])*(pAB[0]+pBB[0]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ r2 = num*num/(pA1*pB1*pA2*pB2); /* we've computed D', its LOD, and r^2 - let's store them and then compute confidence intervals */ String
returnStr = new String(""); returnStr += dprime; returnStr += "\t"; returnStr += loglike1-loglike0; returnStr += "\t"; returnStr += r2; returnStr += "\t"; real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } returnStr += (double) low_i/100.0; returnStr += "\t"; returnStr += (double) high_i/100.0; return returnStr; }
System.err.println( "getNewFname " + date + " " + strExtension );
log.debug( "getNewFname " + date + " " + strExtension );
private File getNewFname( java.util.Date date, String strExtension ) { System.err.println( "getNewFname " + date + " " + strExtension ); SimpleDateFormat fmt = new SimpleDateFormat( "yyyy" ); String strYear = fmt.format( date ); fmt.applyPattern( "yyyyMM" ); String strMonth = fmt.format( date ); fmt.applyPattern( "yyyyMMdd" ); String strDate = fmt.format( date ); File yearDir = new File( volumeBaseDir, strYear ); System.err.println( "YearDir: " + yearDir ); if ( !yearDir.exists() ) { System.err.println( "making yeardir" ); if ( !yearDir.mkdir() ) { log.error( "Failed to create directory " + yearDir.getAbsoluteFile() ); } } // Create the month directory if it does not exist yet File monthDir = new File ( yearDir, strMonth ); System.err.println( "MontDir: " + monthDir ); if ( !monthDir.exists() ) { System.err.println( "making yeardir" ); if ( !monthDir.mkdir() ) { log.error( "Failed to create " + monthDir.getAbsolutePath() ); } } // Find a free order num for this file String monthFiles[] = monthDir.list(); int orderNum = 1; for ( int n = 0; n < monthFiles.length; n++ ) { if ( monthFiles[n].startsWith( strDate ) ) { int delimiterLoc = monthFiles[n].indexOf( "." ); String strFileNum = monthFiles[n].substring( strDate.length()+1, delimiterLoc ); int i = 0; try { i = Integer.parseInt( strFileNum ); } catch ( NumberFormatException e ) {} if ( i >= orderNum ) { orderNum = i+1; } } } String strOrderNum = String.valueOf( orderNum ); // Build the file name from the date, the zero-padded order number and the extension String fname = strDate + "_"+ "00000".substring( 0, 5-strOrderNum.length())+ strOrderNum + "." + strExtension; File archiveFile = new File( monthDir, fname ); return archiveFile; }
System.err.println( "YearDir: " + yearDir );
log.debug( "YearDir: " + yearDir );
System.err.println( "making yeardir" );
log.debug( "making yeardir" );
System.err.println( "MontDir: " + monthDir );
log.debug( "MontDir: " + monthDir );
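The order-number scheme above yields names like 20040131_00007.ext. A small self-contained sketch of the same zero-padding idiom (date and .jpg extension are hypothetical), with String.format shown as the conventional Java 5+ equivalent:

import java.text.SimpleDateFormat;
import java.util.Date;

public class FnameSketch {
    public static void main(String[] args) {
        String strDate = new SimpleDateFormat("yyyyMMdd").format(new Date());
        int orderNum = 7;                       // hypothetical next free number
        // padding idiom from getNewFname: left-pad to five digits
        String strOrderNum = String.valueOf(orderNum);
        String padded = "00000".substring(0, 5 - strOrderNum.length()) + strOrderNum;
        System.out.println(strDate + "_" + padded + ".jpg");   // e.g. 20040131_00007.jpg
        System.out.println(padded.equals(String.format("%05d", orderNum)));  // true
    }
}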
if (counts == null){ return ("");
if(counts == null || counts.length == 1) { return "";
public String getRatios() { //if the array is null this is a block-title node, not an actual hap if (counts == null){ return (""); } nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); for(int i= 0;i<counts.length;i++) { for(int j= 0;j<counts[i].length;j++) { counts[i][j] = (new Double(nf.format(counts[i][j]))).doubleValue(); } } if (counts.length == 1){ //TDT if (this.counts[0][0] > this.counts[0][1]){ return this.counts[0][0] + " : " + this.counts[0][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0]; } }else{ //case-control if (this.counts[0][0] > this.counts[0][1]){ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][0] + " : " + this.counts[0][1] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][0] + " : " + this.counts[0][1] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } }else{ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][1] + " : " + this.counts[0][0] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } } } }
nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1);
return nf.format(this.counts[0][0] / this.counts[0][1]) + ", " + nf.format(this.counts[1][0] / this.counts[1][1]) ;
for(int i= 0;i<counts.length;i++) { for(int j= 0;j<counts[i].length;j++) { counts[i][j] = (new Double(nf.format(counts[i][j]))).doubleValue(); } } if (counts.length == 1){ if (this.counts[0][0] > this.counts[0][1]){ return this.counts[0][0] + " : " + this.counts[0][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0]; } }else{ if (this.counts[0][0] > this.counts[0][1]){ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][0] + " : " + this.counts[0][1] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][0] + " : " + this.counts[0][1] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } }else{ if (this.counts[1][0] > this.counts[1][1]){ return this.counts[0][1] + " : " + this.counts[0][0] + ", " + this.counts[1][0] + " : " + this.counts[1][1]; }else{ return this.counts[0][1] + " : " + this.counts[0][0] + "," + this.counts[1][1] + " : " + this.counts[1][0]; } } }
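The replacement line computes and formats the case/control ratio itself rather than printing rounded count pairs. A minimal self-contained sketch of that NumberFormat usage with hypothetical counts; the decimal separator is locale-dependent:

import java.text.NumberFormat;

public class RatioSketch {
    public static void main(String[] args) {
        NumberFormat nf = NumberFormat.getInstance();
        nf.setMinimumFractionDigits(1);
        nf.setMaximumFractionDigits(1);
        // hypothetical case and control haplotype tallies
        double[][] counts = {{38.0, 12.0}, {21.0, 29.0}};
        String ratios = nf.format(counts[0][0] / counts[0][1]) + ", "
                      + nf.format(counts[1][0] / counts[1][1]);
        System.out.println(ratios);  // "3.2, 0.7" in an English locale
    }
}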
"DoubleClick"));
ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN));
public void mouseClicked(MouseEvent evt) { if (evt.getClickCount() == 2) { ArchitectFrame.getMainInstance().editRelationshipAction.actionPerformed (new ActionEvent(evt.getSource(), ActionEvent.ACTION_PERFORMED, "DoubleClick")); } }
return photoCollection;
if ( photoCollection != null ) { return photoCollection.getOrigCollection(); } return null;
public PhotoCollection getCollection() { return photoCollection; }
public Tag createTag ( ) throws Exception {
public Tag createTag(String name, Attributes attributes) throws Exception {
public Tag createTag ( ) throws Exception { return new ConstraintTag ( this ); }
public Tag createTag ( ) {
public Tag createTag(String name, Attributes attributes) {
public Tag createTag ( ) { return new ConstraintTag ( this ); // still scratching my head about "this" usage... }
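Both replacement signatures above match the shape of Jelly's tag-factory callback, which hands the factory the element name and its SAX attributes. A hedged fragment assuming the same Tag and ConstraintTag types used in the lines above; this particular factory simply ignores the extra parameters:

import org.xml.sax.Attributes;

// fragment: Tag and ConstraintTag come from the surrounding code, not a verified API
public Tag createTag(String name, Attributes attributes) throws Exception {
    // name/attributes arrive from the parser; nothing here needs them,
    // so the new tag is just bound to the enclosing library ("this")
    return new ConstraintTag(this);
}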
assertEquals("target/testFileTag.tmp", "<html xmlns=\"http:
public void testSimpleFileTag() throws Exception { setUpScript("testFileTag.jelly"); Script script = getJelly().compileScript(); script.run(getJellyContext(), getXMLOutput()); FileInputStream fis = new FileInputStream("target/testFileTag.tmp"); String data = readInputStreamIntoString(fis); fis.close(); //FIXME This doesn't take into account attribute ordering //assertEquals("target/testFileTag.tmp", "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"></html>", data); //assertTrue( System.getProperty( "java.runtime.version" ).equals( getJellyContext().getVariable("propertyName" ) ) ); }
"Updated application with ID "+config.getApplicationId());
"Updated application "+"\""+config.getName()+"\"");
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { ApplicationForm appForm = (ApplicationForm)actionForm; ApplicationConfig config = ApplicationConfigManager.getApplicationConfig( appForm.getApplicationId()); assert config != null; config.setName(appForm.getName()); config.setHost(appForm.getHost()); if(appForm.getPort() != null) config.setPort(new Integer(appForm.getPort())); config.setUsername(appForm.getUsername()); final String password = appForm.getPassword(); if(password != null && !password.equals(config.getPassword())){ config.setPassword(password); } ApplicationConfigManager.updateApplication(config); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Updated application with ID "+config.getApplicationId()); return mapping.findForward(Forwards.SUCCESS); }
public String toIdentifier(String logicalName, String physicalName) {
private String toIdentifier(String logicalName, String physicalName) {
public String toIdentifier(String logicalName, String physicalName) { // replace spaces with underscores if (logicalName == null) return null; logger.debug("getting physical name for: " + logicalName); String ident = logicalName.replace(' ','_'); logger.debug("after replace of spaces: " + ident); // see if it's a reserved word, and add something alpha to front if it is... if (isReservedWord(ident)) { ident = "X" + ident; logger.debug("identifier was reserved word, prepending X: " + ident); } // replace anything that is not a letter, digit, or other allowed identifier character with an underscore... ident = ident.replaceAll("[^a-zA-Z0-9_@$#]", "_"); // first time through if (physicalName == null) { // length is ok if (ident.length() < 129) { return ident; } else { // length is too big logger.debug("truncating identifier: " + ident); String base = ident.substring(0,125); int tiebreaker = ((ident.hashCode() % 1000) + 1000) % 1000; logger.debug("new identifier: " + base + tiebreaker); return (base + tiebreaker); } } else { // back for more, which means that we probably // had a namespace conflict. Hack the ident down // to size if it's too big, and then generate // a hash tiebreaker using the ident and the // passed value physicalName logger.debug("physical identifier is not unique, regenerating: " + physicalName); String base = ident; if (ident.length() > 125) { base = ident.substring(0,125); } int tiebreaker = (((ident + physicalName).hashCode() % 1000) + 1000) % 1000; logger.debug("regenerated identifier is: " + (base + tiebreaker)); return (base + tiebreaker); } }
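One detail worth calling out: String.hashCode can be negative, so the tiebreaker is normalized with ((h % 1000) + 1000) % 1000 to land in [0, 999]. A standalone sketch of just that arithmetic:

public class TiebreakerSketch {
    public static void main(String[] args) {
        String ident = "a_rather_long_logical_name";  // hypothetical identifier
        int h = ident.hashCode();                     // may well be negative
        int tiebreaker = ((h % 1000) + 1000) % 1000;  // always in [0, 999]
        // appended to the truncated base, this keeps the result deterministic
        // for the same input while staying under the length limit
        System.out.println(tiebreaker);
    }
}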
stylesheet.applyTemplates( source, select );
stylesheet.applyTemplates( source, select, mode );
public void doTag(XMLOutput output) throws JellyTagException { StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) { throw new JellyTagException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Stylesheet stylesheet = tag.getStylesheet(); XMLOutput oldOutput = tag.getStylesheetOutput(); tag.setStylesheetOutput(output); Object source = tag.getXPathSource(); // for some reason, these DOM4J methods only throw Exception try { if ( select != null ) { stylesheet.applyTemplates( source, select ); } else { stylesheet.applyTemplates( source ); } } catch (Exception e) { throw new JellyTagException(e); } tag.setStylesheetOutput(oldOutput); // #### should support MODE!!! }
stylesheet.applyTemplates( source );
stylesheet.applyTemplates( source, mode );
dataChrom = chrom.toLowerCase();
if (chrom != null){ dataChrom = chrom.toLowerCase(); }else{ dataChrom = null; }
public static void setDataChrom(String chrom) { dataChrom = chrom.toLowerCase(); }
public static Vector calcCCTDT(Vector chromosomes){
public static Vector calcCCTDT(PedFile pf){
public static Vector calcCCTDT(Vector chromosomes){ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1); } } results.add(thisResult); } return results; }
for (int j = 0; j < chromosomes.size()-1; j++){ Chromosome theChrom = (Chromosome)chromosomes.get(j); j++; Chromosome nextChrom = (Chromosome)chromosomes.get(j); if (theChrom.getAffected()){ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 0); }else{ thisResult.tallyCCInd(theChrom.getUnfilteredGenotype(i), nextChrom.getUnfilteredGenotype(i), 1);
Vector indList = pf.getOrder(); Individual currentInd; for (int j = 0; j < indList.size(); j++){ currentInd = (Individual)indList.elementAt(j); if (!currentInd.hasKids()){ thisResult.tallyCCInd(currentInd.getMarker(i), currentInd.getAffectedStatus());
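The signature change trades pairwise Chromosome iteration (two entries per person, hence the extra j++ in the old loop) for a walk over PedFile individuals, tallying only those without children. A self-contained toy of the new shape; this Individual class is a hypothetical stand-in for the project's, and affected == 2 meaning "case" follows the convention used elsewhere in the file:

import java.util.Vector;

public class CCTallySketch {
    // hypothetical stand-in for the project's Individual class
    static class Individual {
        byte marker; int affected; boolean hasKids;
        Individual(byte m, int a, boolean k) { marker = m; affected = a; hasKids = k; }
        byte getMarker() { return marker; }
        int getAffectedStatus() { return affected; }
        boolean hasKids() { return hasKids; }
    }

    public static void main(String[] args) {
        Vector indList = new Vector();
        indList.add(new Individual((byte) 1, 2, false));  // affected case
        indList.add(new Individual((byte) 2, 1, false));  // unaffected control
        indList.add(new Individual((byte) 1, 2, true));   // parent: skipped below
        int[][] tally = new int[2][3];  // [case|control][genotype code]
        for (int j = 0; j < indList.size(); j++) {
            Individual ind = (Individual) indList.elementAt(j);
            if (!ind.hasKids()) {       // tally non-parents only, as in the new loop
                tally[ind.getAffectedStatus() == 2 ? 0 : 1][ind.getMarker()]++;
            }
        }
        System.out.println(tally[0][1] + " case observation(s) of genotype 1");
    }
}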
public static Vector calcTrioTDT(Vector chromosomes) {
public static Vector calcTrioTDT(PedFile pf) throws PedFileException{
public static Vector calcTrioTDT(Vector chromosomes) { Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U); } } } } return results; }
for(int k=0;k<numMarkers;k++){ results.add(new TDTResult(Chromosome.getUnfilteredMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); for(int j=0;j<numMarkers;j++){ if(!chrom1T.kidMissing[j] && !chrom2T.kidMissing[j]) { byte allele1T = chrom1T.getUnfilteredGenotype(j); byte allele1U = chrom1U.getUnfilteredGenotype(j); byte allele2T = chrom2T.getUnfilteredGenotype(j); byte allele2U = chrom2U.getUnfilteredGenotype(j); if( !(allele1T == 0 || allele1U == 0 || allele2T == 0 || allele2U == 0) ){ TDTResult curRes = (TDTResult)results.get(j); curRes.tallyTrioInd(allele1T,allele1U); curRes.tallyTrioInd(allele2T,allele2U);
if (mom1 == kid1) { momT = mom1; momU = mom2; } else { momT = mom2; momU = mom1; } } else { if (dad1 == dad2 && mom1 != mom2) { dadT = dad1; dadU = dad2; if (kid1 == dad1) { momT = kid2; momU = kid1; } else { momT = kid1; momU = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { momT = mom1; momU = mom2; if (kid1 == mom1) { dadT = kid2; dadU = kid1; } else { dadT = kid1; dadU = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { dadT = dad1; dadU = dad1; momT = mom1; momU = mom1; } else { dadT = (byte)(4+dad1); dadU = (byte)(4+dad2); momT = (byte)(4+mom1); momU = (byte)(4+mom2); }
}
results.add(thisResult); }
return connection.createStatement(); }
Statement stmt = new StatementFacade(this, connection.createStatement()); return stmt; }
public Statement createStatement() throws SQLException { return connection.createStatement(); }
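The replacement hands out a StatementFacade wrapped around the real Statement, the usual decorator shape for intercepting JDBC calls. The facade's actual behavior isn't shown in this change, so the sketch below is illustrative only: a plain wrapper that delegates and observes, without claiming to be the real StatementFacade:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// illustrative wrapper: gives the connection layer a single point to
// observe or manage every statement it hands out
public class LoggingStatementWrapper {
    private final Statement target;

    public LoggingStatementWrapper(Statement target) {
        this.target = target;
    }

    public ResultSet executeQuery(String sql) throws SQLException {
        System.out.println("executing: " + sql);  // interception point
        return target.executeQuery(sql);
    }

    public void close() throws SQLException {
        target.close();
    }
}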
int poss_full;
long poss_full;
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ /* if (poss_full > 1000000) { what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); } */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
Iterator pitr = probMap.getKeySet().iterator();
while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; if (probMap.get(next) > .001) {
for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) {
hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); hint[next.intValue()]=m;
hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m;
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
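The context above runs the same pseudocount-seeded EM twice: once per block to estimate per-block haplotype frequencies, and once over the block combinations to ligate them. Below is a minimal, self-contained sketch of that core loop, with hypothetical stand-ins (Pair for Recovery, FreqMap for MapWrap, whose exact interfaces are not shown in this record): phase-known individuals seed the counts, each E-step weights a candidate pair by the product of its two haplotype frequencies, and each M-step re-accumulates expected counts over a small floor value.

import java.util.*;

/** Sketch of the pseudocount-seeded EM above; Pair and FreqMap are
    hypothetical stand-ins for HaploView's Recovery and MapWrap. */
public class EmSketch {
    static final double PSEUDOCOUNT = 0.1;

    /** One candidate phase reconstruction: two haplotype codes and a weight. */
    static class Pair {
        final long h1, h2;
        double p;
        Pair(long h1, long h2) { this.h1 = h1; this.h2 = h2; }
    }

    /** Frequency map with a default value for absent keys, as MapWrap appears to be. */
    static class FreqMap {
        private final Map<Long, Double> m = new HashMap<>();
        private double def;
        FreqMap(double def) { this.def = def; }
        double get(long k) { return m.getOrDefault(k, def); }
        void add(long k, double v) { m.put(k, get(k) + v); }
        void normalize(double total) { m.replaceAll((k, v) -> v / total); def /= total; }
    }

    /** indivs: each individual's list of possible haplotype pairs;
        numPoss: number of possible haplotypes (2^loci in a block). */
    static FreqMap em(List<List<Pair>> indivs, int numPoss, int iters) {
        // Seed: flat pseudocount plus direct counts from phase-known individuals.
        FreqMap f = new FreqMap(PSEUDOCOUNT);
        double total = numPoss * PSEUDOCOUNT;
        for (List<Pair> poss : indivs) {
            if (poss.size() == 1) {
                f.add(poss.get(0).h1, 1.0);
                f.add(poss.get(0).h2, 1.0);
                total += 2.0;
            }
        }
        f.normalize(total);
        for (int it = 0; it < iters; it++) {
            // E-step: p(pair) proportional to freq(h1) * freq(h2), normalized per individual.
            for (List<Pair> poss : indivs) {
                double t = 0.0;
                for (Pair p : poss) { p.p = f.get(p.h1) * f.get(p.h2); t += p.p; }
                for (Pair p : poss) p.p /= t;
            }
            // M-step: re-estimate frequencies from expected pair counts over a small floor.
            FreqMap g = new FreqMap(1e-10);
            total = numPoss * 1e-10;
            for (List<Pair> poss : indivs) {
                for (Pair p : poss) {
                    g.add(p.h1, p.p);
                    g.add(p.h2, p.p);
                    total += 2.0 * p.p;
                }
            }
            g.normalize(total);
            f = g;
        }
        return f;
    }
}

As the comment in the method itself notes, the 0.1 pseudocount keeps the starting distribution flat where nothing is known and close to the phase-known counts where a great deal is known.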
for (int j=0; j<poss_full; j++) {
for (long j=0; j<poss_full; j++) {
private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; //double[] prob = new double[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); } } probMap.normalize(total); iter++; } Iterator pitr = probMap.getKeySet().iterator(); int m=0; while(pitr.hasNext()) { Long next = (Long) pitr.next(); hint[next.intValue()]=-1; //todo: this hard coded threshold suxorz (probably elsewhere too) if (probMap.get(next) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=next.intValue(); hprob[block][m]=probMap.get(next); 
hint[next.intValue()]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total=(double)poss_full; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; doAssociationTests(affStatus, null); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (fullProbMap.get(new Long(j)) > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(new Long(j)))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = 
(int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
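The change recorded above, from for (int j=0; ...) to for (long j=0; ...), widens the index that is boxed into the Long keys of fullProbMap. In the surrounding method poss_full is still declared int, so the immediate effect is key-type consistency with the long haplotype codes used elsewhere; the wider index also guards against the product of per-block counts ever exceeding Integer.MAX_VALUE. A toy illustration with made-up block sizes:

import java.util.HashMap;
import java.util.Map;

public class LongIndexDemo {
    public static void main(String[] args) {
        // Made-up sizes: a product of per-block haplotype counts can exceed int range.
        int[] numHlist = { 2000, 2000, 2000 };
        long possFull = 1;
        for (int n : numHlist) possFull *= n;
        System.out.println(possFull);      // 8000000000, well past Integer.MAX_VALUE

        // An int index compared against a long bound wraps instead of terminating:
        // "for (int j = 0; j < possFull; j++)" never ends, because j overflows to
        // a negative value that is still < possFull.
        // The widened form is safe, and boxes to the same Long keys the map uses:
        Map<Long, Double> freq = new HashMap<>();
        for (long j = 0; j < 5; j++) {
            freq.put(j, 0.0);              // autoboxes j, equivalent to new Long(j)
        }
        System.out.println(freq.size());   // 5
    }
}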
thumbCreatorThread = new ThumbCreatorThread( this ); thumbCreatorThread.start();
public PhotoCollectionThumbView() { super(); createUI(); }
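The added constructor lines start a dedicated worker so thumbnails are generated off the Swing event-dispatch thread. ThumbCreatorThread's internals are not part of this record; the sketch below is only a guess at the usual shape of such a worker — a blocking queue of requests, a cache, daemon status with low priority so the UI stays responsive, and a repaint posted back to the EDT when each thumbnail is ready.

import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import javax.imageio.ImageIO;
import javax.swing.JComponent;
import javax.swing.SwingUtilities;

/** Hypothetical stand-in for ThumbCreatorThread; the real internals are not shown here. */
class ThumbWorker extends Thread {
    private final BlockingQueue<File> pending = new LinkedBlockingQueue<File>();
    private final Map<File, BufferedImage> cache = new ConcurrentHashMap<File, BufferedImage>();
    private final JComponent view;        // repainted when a thumbnail is ready

    ThumbWorker(JComponent view) {
        this.view = view;
        setDaemon(true);                  // don't keep the application alive
        setPriority(Thread.MIN_PRIORITY); // keep the UI responsive
    }

    void request(File photo) { pending.offer(photo); }
    BufferedImage cached(File photo) { return cache.get(photo); }

    @Override public void run() {
        try {
            while (true) {
                File f = pending.take();  // blocks until work arrives
                try {
                    BufferedImage full = ImageIO.read(f);
                    if (full == null) continue;   // not a decodable image
                    // Fixed 100x75 thumbnail; a real worker would preserve aspect ratio.
                    BufferedImage thumb = new BufferedImage(100, 75, BufferedImage.TYPE_INT_RGB);
                    Graphics2D g = thumb.createGraphics();
                    g.drawImage(full, 0, 0, 100, 75, null);  // synchronous scaled draw
                    g.dispose();
                    cache.put(f, thumb);
                    SwingUtilities.invokeLater(new Runnable() {
                        public void run() { view.repaint(); } // back on the EDT
                    });
                } catch (IOException ioe) {
                    // skip unreadable files in this sketch
                }
            }
        } catch (InterruptedException ie) {
            // asked to shut down
        }
    }
}

A view would construct and start it much as the constructor change above does: thumbWorker = new ThumbWorker( this ); thumbWorker.start();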
if ( cmd == PHOTO_PROPS_CMD ) { showSelectionPropsDialog(); } else if ( cmd == PHOTO_SHOW_CMD ) { showSelectedPhoto(); } else if ( cmd == PHOTO_ROTATE_CW_CMD ) { rotateSelectedPhoto( 90 ); } else if ( cmd == PHOTO_ROTATE_CCW_CMD ) { rotateSelectedPhoto( -90 ); } else if ( cmd == PHOTO_ROTATE_180_CMD ) { rotateSelectedPhoto( 180 ); } else if ( cmd == PHOTO_ADD_TO_FOLDER_CMD ) {
if ( cmd == PHOTO_ADD_TO_FOLDER_CMD ) {
public void actionPerformed(ActionEvent e) { String cmd = e.getActionCommand(); if ( cmd == PHOTO_PROPS_CMD ) { showSelectionPropsDialog(); } else if ( cmd == PHOTO_SHOW_CMD ) { showSelectedPhoto(); } else if ( cmd == PHOTO_ROTATE_CW_CMD ) { rotateSelectedPhoto( 90 ); } else if ( cmd == PHOTO_ROTATE_CCW_CMD ) { rotateSelectedPhoto( -90 ); } else if ( cmd == PHOTO_ROTATE_180_CMD ) { rotateSelectedPhoto( 180 ); } else if ( cmd == PHOTO_ADD_TO_FOLDER_CMD ) { queryForNewFolder(); } }
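One detail of this handler is worth flagging: it compares action commands with ==, i.e. by reference, which only works because every menu item is registered with the very same String constants (see the createUI context below). A defensive sketch of the same dispatch, reusing the class's own constants and methods but comparing by value (constant-first equals also tolerates a null command):

public void actionPerformed(ActionEvent e) {
    String cmd = e.getActionCommand();
    // Value comparison instead of reference comparison:
    if ( PHOTO_PROPS_CMD.equals( cmd ) ) {
        showSelectionPropsDialog();
    } else if ( PHOTO_SHOW_CMD.equals( cmd ) ) {
        showSelectedPhoto();
    } else if ( PHOTO_ROTATE_CW_CMD.equals( cmd ) ) {
        rotateSelectedPhoto( 90 );
    } else if ( PHOTO_ROTATE_CCW_CMD.equals( cmd ) ) {
        rotateSelectedPhoto( -90 );
    } else if ( PHOTO_ROTATE_180_CMD.equals( cmd ) ) {
        rotateSelectedPhoto( 180 );
    } else if ( PHOTO_ADD_TO_FOLDER_CMD.equals( cmd ) ) {
        queryForNewFolder();
    }
}

On Java 7 or later, a switch on the command String would express the same dispatch more compactly, provided the command constants are compile-time String constants.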
setAutoscrolls( true );
void createUI() { photoTransferHandler = new PhotoCollectionTransferHandler( this ); setTransferHandler( photoTransferHandler ); addMouseListener( this ); addMouseMotionListener( this ); // Create the popup menu popup = new JPopupMenu(); JMenuItem propsItem = new JMenuItem( "Properties" ); propsItem.addActionListener( this ); propsItem.setActionCommand( PHOTO_PROPS_CMD ); JMenuItem showItem = new JMenuItem( "Show image" ); showItem.addActionListener( this ); showItem.setActionCommand( PHOTO_SHOW_CMD ); JMenuItem rotateCW = new JMenuItem( "Rotate 90 deg CW" ); rotateCW.addActionListener( this ); rotateCW.setActionCommand( PHOTO_ROTATE_CW_CMD ); JMenuItem rotateCCW = new JMenuItem( "Rotate 90 deg CCW" ); rotateCCW.addActionListener( this ); rotateCCW.setActionCommand( PHOTO_ROTATE_CCW_CMD ); JMenuItem rotate180deg = new JMenuItem( "Rotate 180 degrees" ); rotate180deg.addActionListener( this ); rotate180deg.setActionCommand( PHOTO_ROTATE_180_CMD ); JMenuItem addToFolder = new JMenuItem( "Add to folder..." ); addToFolder.addActionListener( this ); addToFolder.setActionCommand( PHOTO_ADD_TO_FOLDER_CMD ); exportSelectedAction = new ExportSelectedAction( this, "Export selected...", null, "Export the selected photos from the archive database to image files", KeyEvent.VK_X ); JMenuItem exportSelected = new JMenuItem( exportSelectedAction ); popup.add( showItem ); popup.add( propsItem ); popup.add( rotateCW ); popup.add( rotateCCW ); popup.add( rotate180deg ); popup.add( addToFolder ); popup.add( exportSelected ); MouseListener popupListener = new PopupListener(); addMouseListener( popupListener ); }
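The added setAutoscrolls( true ) is only half of Swing's drag-scrolling contract: with autoscrolls enabled, the component keeps receiving synthetic mouseDragged events while the button is held down outside its visible rectangle, but it must still scroll itself. The usual completion of the pattern is a mouseDragged handler along these lines, in the MouseMotionListener this class already implements (a sketch, not necessarily the class's actual handler, which this record does not show):

public void mouseDragged(MouseEvent e) {
    // Keep the drag point visible; inside a JScrollPane this is what
    // actually scrolls the view during a drag.
    Rectangle r = new Rectangle( e.getX(), e.getY(), 1, 1 );
    scrollRectToVisible( r );
}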