rem: string, lengths 0 to 477k
add: string, lengths 0 to 313k
context: string, lengths 6 to 599k
JMenuItem propsItem = new JMenuItem( "Properties" ); propsItem.addActionListener( this ); propsItem.setActionCommand( PHOTO_PROPS_CMD ); JMenuItem showItem = new JMenuItem( "Show image" ); showItem.addActionListener( this ); showItem.setActionCommand( PHOTO_SHOW_CMD ); JMenuItem rotateCW = new JMenuItem( "Rotate 90 deg CW" ); rotateCW.addActionListener( this ); rotateCW.setActionCommand( PHOTO_ROTATE_CW_CMD ); JMenuItem rotateCCW = new JMenuItem( "Rotate 90 deg CCW" ); rotateCCW.addActionListener( this ); rotateCCW.setActionCommand( PHOTO_ROTATE_CCW_CMD ); JMenuItem rotate180deg = new JMenuItem( "Rotate 180 degrees" ); rotate180deg.addActionListener( this ); rotate180deg.setActionCommand( PHOTO_ROTATE_180_CMD );
editSelectionPropsAction = new EditSelectionPropsAction( this, "Properties...", null, "Edit properties of the selected photos", KeyEvent.VK_P ); JMenuItem propsItem = new JMenuItem( editSelectionPropsAction ); showSelectedPhotoAction = new ShowSelectedPhotoAction( this, "Show image", null, "Show the selected photo(s)", KeyEvent.VK_S ); JMenuItem showItem = new JMenuItem( showSelectedPhotoAction ); rotateCWAction = new RotateSelectedPhotoAction( this, 90, "Rotate 90 deg CW", null, "Rotates the selected photo", KeyEvent.VK_R ); JMenuItem rotateCW = new JMenuItem( rotateCWAction ); rotateCCWAction = new RotateSelectedPhotoAction( this, 270, "Rotate 90 deg CCW", null, "Rotates the selected photo", KeyEvent.VK_W ); JMenuItem rotateCCW = new JMenuItem( rotateCCWAction ); rotate180degAction = new RotateSelectedPhotoAction( this, 180, "Rotate 180 deg", null, "Rotates the selected photo", KeyEvent.VK_R ); JMenuItem rotate180deg = new JMenuItem( rotate180degAction );
void createUI() { photoTransferHandler = new PhotoCollectionTransferHandler( this ); setTransferHandler( photoTransferHandler ); addMouseListener( this ); addMouseMotionListener( this ); // Create the popup menu popup = new JPopupMenu(); JMenuItem propsItem = new JMenuItem( "Properties" ); propsItem.addActionListener( this ); propsItem.setActionCommand( PHOTO_PROPS_CMD ); JMenuItem showItem = new JMenuItem( "Show image" ); showItem.addActionListener( this ); showItem.setActionCommand( PHOTO_SHOW_CMD ); JMenuItem rotateCW = new JMenuItem( "Rotate 90 deg CW" ); rotateCW.addActionListener( this ); rotateCW.setActionCommand( PHOTO_ROTATE_CW_CMD ); JMenuItem rotateCCW = new JMenuItem( "Rotate 90 deg CCW" ); rotateCCW.addActionListener( this ); rotateCCW.setActionCommand( PHOTO_ROTATE_CCW_CMD ); JMenuItem rotate180deg = new JMenuItem( "Rotate 180 degrees" ); rotate180deg.addActionListener( this ); rotate180deg.setActionCommand( PHOTO_ROTATE_180_CMD ); JMenuItem addToFolder = new JMenuItem( "Add to folder..." ); addToFolder.addActionListener( this ); addToFolder.setActionCommand( PHOTO_ADD_TO_FOLDER_CMD ); exportSelectedAction = new ExportSelectedAction( this, "Export selected...", null, "Export the selected photos to from archive database to image files", KeyEvent.VK_X ); JMenuItem exportSelected = new JMenuItem( exportSelectedAction ); popup.add( showItem ); popup.add( propsItem ); popup.add( rotateCW ); popup.add( rotateCCW ); popup.add( rotate180deg ); popup.add( addToFolder ); popup.add( exportSelected ); MouseListener popupListener = new PopupListener(); addMouseListener( popupListener ); }
Rectangle r = new Rectangle(e.getX(), e.getY(), 1, 1); scrollRectToVisible(r);
public void mouseDragged(MouseEvent e ) { switch ( dragType ) { case DRAG_TYPE_SELECT: handleSelectionDragEvent( e ); break; case DRAG_TYPE_DND: handleDnDDragEvent( e ); break; default: log.error( "Invalid drag type" ); } }
Thumbnail thumbnail = photo.getThumbnail(); if ( thumbnail != null ) { BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } ypos += ((int)img.getHeight())/2 + 4; }
Thumbnail thumbnail = null; if ( photo.hasThumbnail() ) { thumbnail = photo.getThumbnail(); } else { thumbnail = Thumbnail.getDefaultThumbnail(); if ( !thumbCreatorThread.isBusy() ) { thumbCreatorThread.createThumbnail( photo ); } } BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2;
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { // Current position in which attributes can be drawn int ypos = starty + rowHeight/2; if ( photo != null ) { Thumbnail thumbnail = photo.getThumbnail(); if ( thumbnail != null ) { // Find the position for the thumbnail BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } // Increase ypos so that attributes are drawn under the image ypos += ((int)img.getHeight())/2 + 4; } // Draw the attributes Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); } }
Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); }
g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } ypos += ((int)img.getHeight())/2 + 4; Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); }
private void paintThumbnail( Graphics2D g2, PhotoInfo photo, int startx, int starty, boolean isSelected ) { // Current position in which attributes can be drawn int ypos = starty + rowHeight/2; if ( photo != null ) { Thumbnail thumbnail = photo.getThumbnail(); if ( thumbnail != null ) { // Find the position for the thumbnail BufferedImage img = thumbnail.getImage(); int x = startx + (columnWidth - img.getWidth())/(int)2; int y = starty + (rowHeight - img.getHeight())/(int)2; g2.drawImage( img, new AffineTransform( 1f, 0f, 0f, 1f, x, y ), null ); if ( isSelected ) { Stroke prevStroke = g2.getStroke(); Color prevColor = g2.getColor(); g2.setStroke( new BasicStroke( 3.0f) ); g2.setColor( Color.BLUE ); g2.drawRect( x, y, img.getWidth(), img.getHeight() ); g2.setColor( prevColor ); g2.setStroke( prevStroke ); } // Increase ypos so that attributes are drawn under the image ypos += ((int)img.getHeight())/2 + 4; } // Draw the attributes Color prevBkg = g2.getBackground(); if ( isSelected ) { g2.setBackground( Color.BLUE ); } Font attrFont = new Font( "Arial", Font.PLAIN, 10 ); FontRenderContext frc = g2.getFontRenderContext(); if ( showDate && photo.getShootTime() != null ) { DateFormat df = new SimpleDateFormat( "dd.MM.yyyy k:mm" ); String dateStr = df.format( photo.getShootTime() ); TextLayout txt = new TextLayout( dateStr, attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth - bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } String shootPlace = photo.getShootingPlace(); if ( showPlace && shootPlace != null && shootPlace.length() > 0 ) { TextLayout txt = new TextLayout( photo.getShootingPlace(), attrFont, frc ); // Calculate the position for the text Rectangle2D bounds = txt.getBounds(); int xpos = startx + ((int)(columnWidth-bounds.getWidth()))/2 - (int)bounds.getMinX(); g2.clearRect( xpos-2, ypos-2, (int)bounds.getWidth()+4, (int)bounds.getHeight()+4 ); txt.draw( g2, xpos, (int)(ypos + bounds.getHeight()) ); ypos += bounds.getHeight() + 4; } g2.setBackground( prevBkg ); } }
logger.debug(toString());
logger.debug("posting report: "+toString());
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } // TODO decouple this from the main frame UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if(!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } // Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { // Just catch-and-squash everything because we're already in up to our necks at this point. logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream()));
InputStreamReader inputStreamReader = new InputStreamReader(dest.getInputStream()); BufferedReader in = new BufferedReader(inputStreamReader);
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } // TODO decouple this from the main frame UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if(!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } // Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { // Just catch-and-squash everything because we're already in up to our necks at this point. logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
logger.debug("Finished posting report");
public void postReport() { logger.debug(toString()); if (numReportsThisRun++ > MAX_REPORT_TRIES) { logger.info( String.format( "Not logging this error, threshold of %d exceeded", MAX_REPORT_TRIES)); return; } exception.printStackTrace(); String url = System.getProperty(REPORT_URL_SYSTEM_PROP); if (url == null) { url = DEFAULT_REPORT_URL; } // TODO decouple this from the main frame UserSettings settings = ArchitectFrame.getMainInstance().getUserSettings().getQfaUserSettings(); if(!settings.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)) return; logger.info("Posting error report to SQL Power at URL <"+url+">"); try { HttpURLConnection dest = (HttpURLConnection) new URL(url).openConnection(); dest.setDoOutput(true); dest.setDoInput(true); dest.setUseCaches(false); dest.setRequestMethod("POST"); dest.setRequestProperty("Content-Type", "text/xml"); dest.connect(); OutputStream out = null; try { out = new BufferedOutputStream(dest.getOutputStream()); out.write(toXML().getBytes("ISO-8859-1")); out.flush(); } finally { if (out != null) out.close(); } // Note: the error report will only get sent if we attempt to read from the URL Connection (!??!?) BufferedReader in = new BufferedReader(new InputStreamReader(dest.getInputStream())); StringBuffer response = new StringBuffer(); String line; while ((line = in.readLine()) != null) { response.append(line); } in.close(); logger.info("Error report servlet response: "+response); } catch (Exception e) { // Just catch-and-squash everything because we're already in up to our necks at this point. logger.error("Couldn't send exception report to <\""+url+"\">", e); } }
T.add((getSubscribersLink(topicID,String.valueOf(emailCount))),4,row);
T.add((getSubscribersLink(topic.getListId(),String.valueOf(emailCount))),4,row);
public PresentationObject getTopicsOverView(IWContext iwc){ Table T = new Table(); int row = 1; T.add(getTopicLink(-1,iwrb.getLocalizedString("new_topic","New topic")),1,row); row++; T.add(tf.format(iwrb.getLocalizedString("name","Name"),tf.HEADER),1,row); T.add(tf.format(iwrb.getLocalizedString("category","Category"),tf.HEADER),2,row); T.add(tf.format(iwrb.getLocalizedString("mail_server","Mail server"),tf.HEADER),3,row); T.add(tf.format(iwrb.getLocalizedString("subscribers","Subscribers"),tf.HEADER),4,row); T.add(tf.format(iwrb.getLocalizedString("welcome","Welcome"),tf.HEADER),5,row); row++; if(!topics.isEmpty()){ Iterator iter = topics.values().iterator(); EmailTopic topic; ICCategory category; EmailAccount account; EmailLetter welcome; Collection welcomes; Collection accounts; int emailCount; int topicID; while(iter.hasNext()){ topic = (EmailTopic) iter.next(); topicID = topic.getIdentifier().intValue(); T.add(getTopicLink(topicID,topic.getName()),1,row); category = (ICCategory) categories.get(Integer.toString(topic.getCategoryId())); T.add(tf.format(category.getName()),2,row); accounts = MailFinder.getInstance().getTopicAccounts(topicID,MailProtocol.SMTP); if(accounts!=null && !accounts.isEmpty()){ account = (EmailAccount) accounts.iterator().next(); T.add(getAccountLink(topicID,( account.getIdentifier().intValue()),account.getHost()),3,row); } else{ T.add(getAccountLink(topicID,-1,"X"),3,row); } emailCount = MailFinder.getInstance().getListEmailsCount(topic.getListId()); T.add((getSubscribersLink(topicID,String.valueOf(emailCount))),4,row); welcomes = MailFinder.getInstance().getEmailLetters(topicID,MailLetter.TYPE_SUBSCRIPTION); if(welcomes!=null && !welcomes.isEmpty()){ welcome = (MailLetter) welcomes.iterator().next(); T.add(getWelcomeLetterLink(welcome.getIdentifier().intValue(),topicID,welcome.getSubject()),5,row); //T.add(tf.format(welcome.getSubject()),5,row); } else{ T.add(getWelcomeLetterLink(-1,topicID,"X"),5,row); } row++; } } return T; }
pairwiseButton.setSelected(true);
tripleButton.setSelected(true);
public void refreshTable(){ this.removeAll(); snpsByName = new Hashtable(); this.setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); Vector columnNames = new Vector(); Vector tableData = new Vector(); columnNames.add("#"); columnNames.add("Name"); columnNames.add("Position"); columnNames.add("Force Include"); columnNames.add("Force Exclude"); columnNames.add("Tag this SNP?"); for (int i = 0; i < Chromosome.getSize(); i++){ SNP tempSNP = Chromosome.getMarker(i); snpsByName.put(tempSNP.getName(), tempSNP); Vector tempData = new Vector(); tempData.add(Integer.toString(Chromosome.realIndex[i]+1)); tempData.add(tempSNP.getName()); tempData.add(String.valueOf(tempSNP.getPosition())); tempData.add(new Boolean(false)); tempData.add(new Boolean(false)); tempData.add(new Boolean(true)); tableData.add(tempData); } TagConfigTableModel tableModel = new TagConfigTableModel(columnNames,tableData); tableModel.addTableModelListener(this); table = new JTable(tableModel); table.getColumnModel().getColumn(0).setPreferredWidth(30); JScrollPane scrollPane = new JScrollPane(table); scrollPane.setMaximumSize(scrollPane.getPreferredSize()); add(scrollPane); JPanel optsRightPanel = new JPanel(); optsRightPanel.setLayout(new BoxLayout(optsRightPanel, BoxLayout.Y_AXIS)); JPanel rsqPanel = new JPanel(); JLabel rsqLabel = new JLabel("r\u00b2 threshold"); rsqPanel.add(rsqLabel); rsqField = new NumberTextField(String.valueOf(Options.getTaggerRsqCutoff()),5,true); rsqPanel.add(rsqField); optsRightPanel.add(rsqPanel); JPanel lodPanel = new JPanel(); JLabel lodLabel = new JLabel("LOD threshold for multi-marker tests"); lodPanel.add(lodLabel); lodField = new NumberTextField(String.valueOf(Options.getTaggerLODCutoff()),5,true); lodPanel.add(lodField); optsRightPanel.add(lodPanel); JPanel optsLeftPanel = new JPanel(); optsLeftPanel.setLayout(new BoxLayout(optsLeftPanel, BoxLayout.Y_AXIS)); JRadioButton pairwiseButton = new JRadioButton("pairwise tagging only"); pairwiseButton.setActionCommand(String.valueOf(Tagger.PAIRWISE_ONLY)); optsLeftPanel.add(pairwiseButton); JRadioButton dupleButton = new JRadioButton("aggressive tagging: use 2-marker haplotypes"); dupleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_DUPLE)); optsLeftPanel.add(dupleButton); JRadioButton tripleButton = new JRadioButton("aggressive tagging: use 2- and 3-marker haplotypes"); tripleButton.setActionCommand(String.valueOf(Tagger.AGGRESSIVE_TRIPLE)); optsLeftPanel.add(tripleButton); aggressiveGroup = new ButtonGroup(); aggressiveGroup.add(pairwiseButton); aggressiveGroup.add(dupleButton); aggressiveGroup.add(tripleButton); pairwiseButton.setSelected(true); JPanel optsPanel = new JPanel(); optsPanel.add(optsLeftPanel); optsPanel.add(optsRightPanel); add(optsPanel); runTaggerButton = new JButton("Run Tagger"); runTaggerButton.addActionListener(this); resetTableButton = new JButton("Reset Table"); resetTableButton.addActionListener(this); JPanel buttonPanel = new JPanel(); buttonPanel.add(runTaggerButton); buttonPanel.add(resetTableButton); add(buttonPanel); }
Vector results = pedFile.getResults();
Vector results = null; if (pedFile != null){ results = pedFile.getResults(); }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
MarkerResult mr = (MarkerResult)results.elementAt(i);
MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2,
pos, maf, a1, a2,
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
double genoPC = mr.getGenoPercent(); if (prevPosition != Long.MIN_VALUE){ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ thisMarker.setDup(1); prevMarker.setDup(2); }else{ thisMarker.setDup(2); prevMarker.setDup(1);
if (mr != null){ double genoPC = mr.getGenoPercent(); if (prevPosition != Long.MIN_VALUE){ if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ thisMarker.setDup(1); prevMarker.setDup(2); }else{ thisMarker.setDup(2); prevMarker.setDup(1); }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
} prevPosition = pos; prevMarker = thisMarker;
prevPosition = pos; prevMarker = thisMarker; }
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else if (infile != null){ //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = pedFile.getResults(); long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = (MarkerResult)results.elementAt(i); //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, Math.rint(mr.getMAF()*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= mr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), 
Math.rint(mr.getMAF()*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(mr.getMAF()*100.0)/100.0,a1,a2));
markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2));
service.addApplication(Utils.getServiceContext(context), appConfigData);
service.addAppWithDashboard(Utils.getServiceContext(context), appConfigData);
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception{ ApplicationForm appForm = (ApplicationForm)actionForm; /* create ApplicationConfigData from this form */ ApplicationConfigData appConfigData = new ApplicationConfigData(); CoreUtils.copyProperties(appConfigData, appForm); Map<String, String> paramValues = new HashMap<String, String>(); if(appForm.getJndiFactory() != null) paramValues.put(ApplicationConfig.JNDI_FACTORY, appForm.getJndiFactory()); if(appForm.getJndiURL() != null) paramValues.put(ApplicationConfig.JNDI_URL, appForm.getJndiURL()); appConfigData.setParamValues(paramValues); ConfigurationService service = ServiceFactory.getConfigurationService(); service.addApplication(Utils.getServiceContext(context), appConfigData); return mapping.findForward(Forwards.SUCCESS); }
public UserList(int count, String namePrefix, String domain, String password) { this(count, namePrefix, domain); m_password = password;
public UserList(int count, String namePrefix, String domain) { m_count = count; m_namePrefix = namePrefix; m_domain = domain;
public UserList(int count, String namePrefix, String domain, String password) { this(count, namePrefix, domain); m_password = password; }
p.add(filename = new JTextField((ddlg.getFile() == null ? "" : ddlg.getFile().getPath()), 35));
File outFile = ddlg.getFile(); if (outFile == null) { outFile = new File(System.getProperty("user.dir"), project.getName()+".ddl"); } p.add(filename = new JTextField((outFile.getPath()), 35));
protected void setup() { GenericDDLGenerator ddlg = project.getDDLGenerator(); setLayout(new FormLayout()); add(new JLabel("Allow Connection?")); add(allowConnection = new JCheckBox()); allowConnection.setSelected(ddlg.getAllowConnection()); add(new JLabel("Output File")); JPanel p = new JPanel(new FlowLayout()); p.add(filename = new JTextField((ddlg.getFile() == null ? "" : ddlg.getFile().getPath()), 35)); p.add(fileChooserButton = new JButton("...")); fileChooserButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { JFileChooser fc = new JFileChooser(); fc.addChoosableFileFilter(ASUtils.sqlFileFilter); fc.setSelectedFile(new File(filename.getText())); int rv = fc.showDialog(DDLExportPanel.this, "Ok"); if (rv == JFileChooser.APPROVE_OPTION) { filename.setText(fc.getSelectedFile().getPath()); } } }); add(p); Vector dbTypeList = new Vector(); dbTypeList.add(ASUtils.lvb("Generic JDBC", GenericDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("DB2", DB2DDLGenerator.class)); dbTypeList.add(ASUtils.lvb("Oracle 8i/9i", OracleDDLGenerator.class)); dbTypeList.add(ASUtils.lvb("SQLServer 2000", SQLServerDDLGenerator.class)); add(new JLabel("Database Type")); add(dbType = new JComboBox(dbTypeList)); if (ddlg.getClass() == GenericDDLGenerator.class) { dbType.setSelectedIndex(0); } else if (ddlg.getClass() == DB2DDLGenerator.class) { dbType.setSelectedIndex(1); } else if (ddlg.getClass() == OracleDDLGenerator.class) { dbType.setSelectedIndex(2); } else if (ddlg.getClass() == SQLServerDDLGenerator.class) { dbType.setSelectedIndex(3); } else { logger.error("Unknown DDL generator class "+ddlg.getClass()); dbType.addItem(ASUtils.lvb("Unknown Generator", ddlg.getClass())); } }
registerTag("setTimeZone", SetTimeZoneTag.class); registerTag("timeZone", TimeZoneTag.class);
public FmtTagLibrary() { registerTag("bundle", BundleTag.class); registerTag("message", MessageTag.class); registerTag("param", ParamTag.class); registerTag("setLocale", SetLocaleTag.class); }
System.out.println("SQLObject: firing dbChildrenInserted event");
System.out.println(getClass().getName()+": firing dbChildrenInserted event");
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println("SQLObject: firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println("SQLObject: notified "+count+" listeners"); }
System.out.println("SQLObject: notified "+count+" listeners");
System.out.println(getClass().getName()+": notified "+count+" listeners");
protected void fireDbChildrenInserted(int[] newIndices, List newChildren) { System.out.println("SQLObject: firing dbChildrenInserted event"); SQLObjectEvent e = new SQLObjectEvent (this, newIndices, (SQLObject[]) newChildren.toArray(new SQLObject[newChildren.size()])); Iterator it = getSqlObjectListeners().iterator(); int count = 0; while (it.hasNext()) { count ++; ((SQLObjectListener) it.next()).dbChildrenInserted(e); } System.out.println("SQLObject: notified "+count+" listeners"); }
expression = createExpression( localName, attributeName, attributeValue );
expression = createConstantExpression( localName, attributeName, attributeValue );
protected TagScript createStaticTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { Tag tag = new StaticTag( namespaceURI, localName ); TagScript script = new TagScript( tag ); // now iterate through through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = getExpressionFactory().createExpression( attributeValue ); if ( expression == null ) { expression = createExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } catch (Exception e) { log.warn( "Could not create static tag for URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } }
expression = createExpression( localName, attributeName, attributeValue );
expression = createConstantExpression( localName, attributeName, attributeValue );
protected TagScript createTag( String namespaceURI, String localName, Attributes list ) throws SAXException { try { // use the URI to load a taglib TagLibrary taglib = (TagLibrary) taglibs.get( namespaceURI ); if ( taglib == null ) { if ( namespaceURI != null && namespaceURI.startsWith( "jelly:" ) ) { String uri = namespaceURI.substring(6); // try to find the class on the claspath try { Class taglibClass = getClassLoader().loadClass( uri ); taglib = (TagLibrary) taglibClass.newInstance(); } catch (ClassNotFoundException e) { log.warn( "Could not load class: " + uri + " so disabling the taglib" ); } } } if ( taglib != null ) { TagScript script = taglib.createTagScript( localName, list ); // now iterate through through the expressions int size = list.getLength(); for ( int i = 0; i < size; i++ ) { String attributeName = list.getLocalName(i); String attributeValue = list.getValue(i); Expression expression = taglib.createExpression( getExpressionFactory(), localName, attributeName, attributeValue ); if ( expression == null ) { expression = createExpression( localName, attributeName, attributeValue ); } script.addAttribute( attributeName, expression ); } return script; } return null; } catch (Exception e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); throw createSAXException(e); } catch (Throwable e) { log.warn( "Could not create taglib or URI: " + namespaceURI + " tag name: " + localName, e ); return null; } }
XMLParser parser = new XMLParser(); parser.setContext( getContext() ); Script script = parser.parse( getUrl().openStream() ); script = script.compile(); if ( log.isDebugEnabled() ) { log.debug( "Compiled script: " + getUrl() ); } return script; }
XMLParser parser = new XMLParser(); parser.setJellyContext(getJellyContext()); Script script = parser.parse(getUrl().openStream()); script = script.compile(); if (log.isDebugEnabled()) { log.debug("Compiled script: " + getUrl()); } return script; }
public Script compileScript() throws Exception { XMLParser parser = new XMLParser(); parser.setContext( getContext() ); Script script = parser.parse( getUrl().openStream() ); script = script.compile(); if ( log.isDebugEnabled() ) { log.debug( "Compiled script: " + getUrl() ); } return script; }
if ( args.length <= 0 ) { System.out.println( "Usage: Jelly scriptFile [outputFile]" ); return; } Jelly jelly = new Jelly(); jelly.setScript( args[0] ); Writer writer = new BufferedWriter( new OutputStreamWriter( System.out ) ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput( writer ); Context context = jelly.getContext(); context.setVariable( "args", args ); script.run( context, output ); writer.close(); }
if (args.length <= 0) { System.out.println("Usage: Jelly scriptFile [outputFile]"); return; } Jelly jelly = new Jelly(); jelly.setScript(args[0]); Writer writer = new BufferedWriter(new OutputStreamWriter(System.out)); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput(writer); JellyContext context = jelly.getJellyContext(); context.setVariable("args", args); script.run(context, output); writer.close(); }
public static void main(String[] args) throws Exception { if ( args.length <= 0 ) { System.out.println( "Usage: Jelly scriptFile [outputFile]" ); return; } Jelly jelly = new Jelly(); jelly.setScript( args[0] ); /* // later we might wanna add some command line arguments // checking stuff using commons-cli to specify the output file // and input file via command line arguments Writer writer = ( args.length > 1 ) ? new FileWriter( args[1] ) : new OutputStreamWriter( System.out ); BufferedWriter output = new BufferedWriter( writer );*/ Writer writer = new BufferedWriter( new OutputStreamWriter( System.out ) ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput( writer ); // add the system properties and the command line arguments Context context = jelly.getContext(); context.setVariable( "args", args ); script.run( context, output ); writer.close(); }
oldThumbnail = null;
protected void createThumbnail( VolumeBase volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Maximum size of the thumbnail int maxThumbWidth = 100; int maxThumbHeight = 100; checkCropBounds(); /* Determine the minimum size for the instance used for thumbnail creation to get decent image quality. The cropped portion of the image must be roughly the same resolution as the intended thumbnail. */ double cropWidth = cropMaxX - cropMinX; cropWidth = ( cropWidth > 0.000001 ) ? cropWidth : 1.0; double cropHeight = cropMaxY - cropMinY; cropHeight = ( cropHeight > 0.000001 ) ? cropHeight : 1.0; int minInstanceWidth = (int)(((double)maxThumbWidth)/cropWidth); int minInstanceHeight = (int)(((double)maxThumbHeight)/cropHeight); int minInstanceSide = Math.max( minInstanceWidth, minInstanceHeight ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); /* We try to ensure that the thumbnail is actually from the original image by comparing aspect ratio of it to original. This is not a perfect check but it will usually catch the most typical errors (like having a the original rotated by RAW conversion SW but still the original EXIF thumbnail. 
*/ double origAspect = this.getAspect( original.getWidth(), original.getHeight(), 1.0 ); double aspectAccuracy = 0.01; // First, check if there is a thumbnail in image header BufferedImage origImage = readExifThumbnail( original.getImageFile() ); if ( origImage == null || !isOkForThumbCreation( origImage.getWidth(), origImage.getHeight(), minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // Read the image try { Iterator readers = ImageIO.getImageReadersByFormatName( "jpg" ); if ( readers.hasNext() ) { ImageReader reader = (ImageReader)readers.next(); log.debug( "Creating stream" ); ImageInputStream iis = ImageIO.createImageInputStream( original.getImageFile() ); reader.setInput( iis, false, false ); int numThumbs = 0; try { int numImages = reader.getNumImages( true ); numThumbs = reader.getNumThumbnails(0); } catch (IOException ex) { ex.printStackTrace(); } if ( numThumbs > 0 && isOkForThumbCreation( reader.getThumbnailWidth( 0, 0 ), reader.getThumbnailHeight( 0, 0 ) , minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // There is a thumbanil that is big enough - use it log.debug( "Original has thumbnail, size " + reader.getThumbnailWidth( 0, 0 ) + " x " + reader.getThumbnailHeight( 0, 0 ) ); origImage = reader.readThumbnail( 0, 0 ); log.debug( "Read thumbnail" ); } else { log.debug( "No thumbnail in original" ); ImageReadParam param = reader.getDefaultReadParam(); // Find the maximum subsampling rate we can still use for creating // a quality thumbnail int subsampling = 1; int minDim = Math.min( reader.getWidth( 0 ),reader.getHeight( 0 ) ); while ( 2 * minInstanceSide * subsampling < minDim ) { subsampling *= 2; } param.setSourceSubsampling( subsampling, subsampling, 0, 0 ); origImage = reader.read( 0, param ); log.debug( "Read original" ); } iis.close(); } } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); AffineTransform xform = org.photovault.image.ImageXform.getRotateXform( prefRotation -original.getRotated(), origWidth, origHeight ); ParameterBlockJAI rotParams = new ParameterBlockJAI( "affine" ); rotParams.addSource( origImage ); rotParams.setParameter( "transform", xform ); rotParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); RenderedOp rotatedImage = JAI.create( "affine", rotParams ); ParameterBlockJAI cropParams = new ParameterBlockJAI( "crop" ); cropParams.addSource( rotatedImage ); float cropX = (float)( Math.rint( rotatedImage.getMinX() + cropMinX * rotatedImage.getWidth() ) ); float cropY = (float)( Math.rint( rotatedImage.getMinY() + cropMinY * rotatedImage.getHeight())); float cropW = (float)( Math.rint((cropWidth) * rotatedImage.getWidth() ) ); float cropH = (float) ( Math.rint((cropHeight) * rotatedImage.getHeight() )); cropParams.setParameter( "x", cropX ); cropParams.setParameter( "y", cropY ); cropParams.setParameter( "width", cropW ); cropParams.setParameter( "height", cropH ); RenderedOp cropped = JAI.create("crop", cropParams, null); // Translate the image so that it begins in origo ParameterBlockJAI pbXlate = new ParameterBlockJAI( "translate" ); 
pbXlate.addSource( cropped ); pbXlate.setParameter( "xTrans", (float) (-cropped.getMinX() ) ); pbXlate.setParameter( "yTrans", (float) (-cropped.getMinY() ) ); RenderedOp xformImage = JAI.create( "translate", pbXlate ); // Finally, scale this to thumbnail AffineTransform thumbScale = org.photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, 0, xformImage.getWidth(), xformImage.getHeight() ); ParameterBlockJAI thumbScaleParams = new ParameterBlockJAI( "affine" ); thumbScaleParams.addSource( xformImage ); thumbScaleParams.setParameter( "transform", thumbScale ); thumbScaleParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); PlanarImage thumbImage = JAI.create( "affine", thumbScaleParams ); // Save it FileOutputStream out = null; try { out = new FileOutputStream(thumbnailFile.getAbsolutePath()); } catch(IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } JPEGEncodeParam encodeParam = new JPEGEncodeParam(); ImageEncoder encoder = ImageCodec.createImageEncoder("JPEG", out, encodeParam); try { encoder.encode( thumbImage ); out.close(); // origImage.dispose(); thumbImage.dispose(); } catch (IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); thumbInstance.setCropBounds( getCropBounds() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); log.debug( "Thumbnail loaded" ); txw.commit(); }
oldThumbnail = null;
public Thumbnail getExistingThumbnail() { if ( thumbnail == null ) { log.debug( "Finding thumbnail from database" ); // First try to find an instance from existing instances ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_THUMBNAIL && Math.abs(instance.getRotated() - prefRotation) < 0.0001 && instance.getCropBounds().equals( getCropBounds() ) ) { log.debug( "Found thumbnail from database" ); thumbnail = Thumbnail.createThumbnail( this, instance.getImageFile() ); break; } } } return thumbnail; }
oldThumbnail = null;
public Thumbnail getThumbnail() { log.debug( "getThumbnail: entry, Finding thumbnail for " + uid ); if ( thumbnail == null ) { thumbnail = getExistingThumbnail(); if ( thumbnail == null ) { // Next try to create a new thumbnail instance log.debug( "No thumbnail found, creating" ); createThumbnail(); } } if ( thumbnail == null ) { // Thumbnail was not successfully created, most probably because there // is no available instance thumbnail = Thumbnail.getDefaultThumbnail(); } log.debug( "getThumbnail: exit" ); return thumbnail; }
thumbnail = null;
invalidateThumbnail();
public void setCropBounds( Rectangle2D cropBounds ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( !cropBounds.equals( getCropBounds() ) ) { // Rotation changes, invalidate the thumbnail thumbnail = null; } cropMinX = cropBounds.getMinX(); cropMinY = cropBounds.getMinY(); cropMaxX = cropBounds.getMaxX(); cropMaxY = cropBounds.getMaxY(); checkCropBounds(); modified(); txw.commit(); }
thumbnail = null;
invalidateThumbnail();
public void setPrefRotation(double v) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); if ( v != prefRotation ) { // Rotation changes, invalidate the thumbnail thumbnail = null; } this.prefRotation = v; modified(); txw.commit(); }
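The two edits above replace the direct thumbnail = null assignments with a call to invalidateThumbnail(). Taken together with the oldThumbnail = null lines added to createThumbnail(), getExistingThumbnail() and getThumbnail() earlier in this section, they suggest the class now keeps the stale thumbnail around while a fresh one is generated. The helper itself is not part of this excerpt; a minimal sketch of what it could look like, assuming only the thumbnail and oldThumbnail fields, is:

    // Hypothetical sketch, not taken from the Photovault source: drop the cached
    // thumbnail but keep the previous one so callers can still show something
    // until createThumbnail() has produced a replacement.
    protected void invalidateThumbnail() {
        oldThumbnail = thumbnail;
        thumbnail = null;
    }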
String newPhotographer = "New photographer"; ctrl.setField( PhotoInfoController.PHOTOGRAPHER, newPhotographer ); try { ctrl.save(); } catch ( Exception e ) { fail( "Exception while saving: " + e.getMessage() ); } assertEquals( "PhotoInfo fields should match ctrl", newPhotographer, photo.getPhotographer() ); try { PhotoInfo photo2 = PhotoInfo.retrievePhotoInfo( photo.getUid() ); assertEquals( photo2.getPhotographer(), photo.getPhotographer() ); assertTrue( photo2.getFStop() == photo.getFStop() ); } catch ( PhotoNotFoundException e ) { fail ( "inserted photo not found" ); }
public void testNewPhotoCreation() { File testFile = new File( "c:\\java\\photovault\\testfiles\\test1.jpg" ); ctrl.createNewPhoto( testFile ); String photographer = "Test photographer"; ctrl.setField( PhotoInfoController.PHOTOGRAPHER, photographer ); assertEquals( photographer, ctrl.getField( PhotoInfoController.PHOTOGRAPHER ) ); // Saving the ctrl state should create a new photo object try { ctrl.save(); } catch ( Exception e ) { fail( "Exception while saving: " + e.getMessage() ); } PhotoInfo photo = ctrl.getPhoto(); assertTrue( "getPhoto should return PhotoInfo object after save()", photo != null ); assertEquals( "PhotoInfo fields should match ctrl", photographer, photo.getPhotographer() ); // Check the database also try { PhotoInfo photo2 = PhotoInfo.retrievePhotoInfo( photo.getUid() ); assertEquals( photo2.getPhotographer(), photo.getPhotographer() ); assertTrue( photo2.getFStop() == photo.getFStop() ); } catch ( PhotoNotFoundException e ) { fail ( "inserted photo not found" ); } photo.delete(); }
Expression expression = (Expression) entry.getValue();
if(name.indexOf(':')!=-1) name = name.substring(name.indexOf(':')+1); ExpressionAttribute expat = (ExpressionAttribute) entry.getValue(); Expression expression = expat.exp;
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
dynaTag.setAttribute(name, value);
if(expat.prefix!=null || expat.prefix.length()>0 && tag instanceof StaticTag) ((StaticTag) dynaTag).setAttribute(name,expat.prefix, expat.nsURI,value); else dynaTag.setAttribute(name, value);
public void run(JellyContext context, XMLOutput output) throws JellyTagException { try { startNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not start namespace prefixes",e); } Tag tag = null; try { tag = getTag(context); // lets see if we have a dynamic tag if (tag instanceof StaticTag) { tag = findDynamicTag(context, (StaticTag) tag); } setTag(tag,context); } catch (JellyException e) { throw new JellyTagException(e); } URL rootURL = context.getRootURL(); URL currentURL = context.getCurrentURL(); try { if ( tag == null ) { return; } tag.setContext(context); setContextURLs(context); DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for (Iterator iter = attributes.entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = null; if ( Expression.class.isAssignableFrom( dynaTag.getAttributeType(name) ) ) { value = expression; } else { value = expression.evaluate(context); } dynaTag.setAttribute(name, value); } tag.doTag(output); } catch (JellyTagException e) { handleException(e); } catch (RuntimeException e) { handleException(e); } finally { context.setCurrentURL(currentURL); context.setRootURL(rootURL); } try { endNamespacePrefixes(output); } catch (SAXException e) { throw new JellyTagException("could not end namespace prefixes",e); } }
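The edited run() method above now pulls each attribute out of an ExpressionAttribute holder (reading its exp, prefix and nsURI fields) instead of a bare Expression, and strips any prefix from the attribute name before handing it to the tag. The holder class is not shown in this excerpt; a minimal sketch consistent with those accesses (field names taken from the usage above, everything else assumed) would be:

    // Hypothetical sketch of the attribute holder implied by the edit above;
    // the real commons-jelly class may carry more state or accessors.
    public class ExpressionAttribute {
        Expression exp;   // the attribute value, evaluated or passed through at run time
        String prefix;    // namespace prefix of the attribute, "" when unprefixed
        String nsURI;     // namespace URI matching the prefix, "" when unprefixed
    }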
System.out.println("The current Name is the same as the old name");
logger.debug("The current Name is the same as the old name");
public void actionPerformed(ActionEvent e) { logger.debug("DBCS Action invoked"); ArchitectDataSource newDS = dbcsPanel.getDbcs(); String curName = null; for (Component c : ((TextPanel)dbcsPanel.getComponents()[0]).getComponents()) { if ("dbNameField".equals(c.getName())){ curName = ((JTextField) c).getText(); } } if (curName == null ) { throw new ArchitectRuntimeException(new ArchitectException("DBCS Panel improperly intialized")); } if (isNew) { dbcsPanel.applyChanges(); if ("".equals(newDS.getName().trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); if (plDotIni.getDataSource(newDS.getName()) == null ) { plDotIni.addDataSource(newDS); if (connectionSelectionCallBack != null) { connectionSelectionCallBack.selectDBConnection(newDS); } } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } } else if ("".equals(curName.trim())) { JOptionPane.showMessageDialog(newConnectionDialog,"A connection must have at least 1 character that is not whitespace"); newConnectionDialog.setVisible(true); } else if (curName.equals(oldName)) { System.out.println("The current Name is the same as the old name"); dbcsPanel.applyChanges(); } else { PlDotIni plDotIni = ArchitectFrame.getMainInstance().getUserSettings().getPlDotIni(); ArchitectDataSource dataSource = plDotIni.getDataSource(curName); if (dataSource == null ) { dbcsPanel.applyChanges(); } else { JOptionPane.showMessageDialog(newConnectionDialog,"A connection with the name \""+curName+"\" already exists"); newConnectionDialog.setVisible(true); } } }
} else { this.shouldExport = false;
public void setExport(String export) { if ("true".equals(export)) { this.shouldExport = true; } }
} else { this.shouldInherit = false;
public void setInherit(String inherit) { if ("true".equals(inherit)) { this.shouldInherit = true; } }
public void characters(char ch[], int start, int length) throws SAXException {
public void characters(char[] ch, int start, int length) throws SAXException {
public void characters(char ch[], int start, int length) throws SAXException { contentHandler.characters(ch, start, length); }
if (log.isDebugEnabled()) log.debug("error setting lexical handler properties", e);
if (log.isDebugEnabled()) { log.debug("error setting lexical handler properties", e); }
public static XMLOutput createXMLOutput(XMLReader xmlReader) { XMLOutput output = new XMLOutput(xmlReader.getContentHandler()); // isn't it lovely what we've got to do to find the LexicalHandler... ;-) for (int i = 0; i < LEXICAL_HANDLER_NAMES.length; i++) { try { Object value = xmlReader.getProperty(LEXICAL_HANDLER_NAMES[i]); if (value instanceof LexicalHandler) { output.setLexicalHandler((LexicalHandler) value); break; } } catch (Exception e) { // ignore any unsupported-operation exceptions if (log.isDebugEnabled()) log.debug("error setting lexical handler properties", e); } } return output; }
contentHandler.endPrefixMapping(prefix);
namespaceStack.popNamespace(prefix);
public void endPrefixMapping(String prefix) throws SAXException { contentHandler.endPrefixMapping(prefix); }
} else if (contentHandler instanceof XMLOutput) { ((XMLOutput)contentHandler).flush();
public void flush() throws IOException { if( contentHandler instanceof XMLWriter ) { ((XMLWriter)contentHandler).flush(); } }
public void ignorableWhitespace(char ch[], int start, int length)
public void ignorableWhitespace(char[] ch, int start, int length)
public void ignorableWhitespace(char ch[], int start, int length) throws SAXException { contentHandler.ignorableWhitespace(ch, start, length); }
contentHandler.startPrefixMapping(prefix, uri);
namespaceStack.pushNamespace(prefix, uri);
public void startPrefixMapping(String prefix, String uri) throws SAXException { contentHandler.startPrefixMapping(prefix, uri); }
if (Options.getAssocTest() == 2) {
if (Options.getAssocTest() == ASSOC_TRIO) {
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (Options.getAssocTest() == 2) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ poss_full=1; for (block=0; 
block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) {/* what we really need to do is go through and pare backto using a smaller number (e.g., > .002, .005)//printf("too many possibilities: %d\n",poss_full);return(-5);}*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ Vector caseFreqs = new Vector(); Vector controlFreqs = new Vector(); //suffers from OCD :) double[] tempCase, tempControl, totalCase, totalControl; if (Options.getAssocTest() == 1){ tempCase = new double[poss_full]; tempControl = new double[poss_full]; totalCase = new double[poss_full]; totalControl = new double[poss_full]; double tempnorm=0; for (int i = numTrios*2; i < num_indivs; i++){ for (int n=0; n<superdata[i].nsuper; n++) { if (((Integer)affStatus.elementAt(i)).intValue() == 1){ tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p; tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p; } tempnorm += superdata[i].superposs[n].p; } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) { totalCase[j] += (tempCase[j]/tempnorm); totalControl[j] += (tempControl[j]/tempnorm); tempCase[j]=tempControl[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j 
<poss_full; j++){ if (superprob[j] > .001) { caseFreqs.add(new Double(totalCase[j])); controlFreqs.add(new Double(totalControl[j])); } } } double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(Options.getAssocTest() == 2) { double tempnorm=0,product; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { if (((Integer)affStatus.elementAt(i)).intValue() == 2){ tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; } /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
if (Options.getAssocTest() == 1){
if (Options.getAssocTest() == ASSOC_CC){
if(Options.getAssocTest() == 2)
if(Options.getAssocTest() == ASSOC_TRIO)
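The three edits in this method replace the magic values returned by Options.getAssocTest() with named constants: 1 becomes ASSOC_CC and 2 becomes ASSOC_TRIO. The constant definitions themselves are not part of this excerpt; from the substitutions above they would amount to something like:

    // Hypothetical sketch of the constants implied by the edits above
    // (values inferred from the literals they replace).
    public static final int ASSOC_CC   = 1;  // case/control association test
    public static final int ASSOC_TRIO = 2;  // trio (TDT-style) association test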
} else if (Options.getAssocTest() == 1){
} else if (Options.getAssocTest() == ASSOC_CC){
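The rows above (and the earlier ASSOC_TRIO comparison) replace hard-coded association-test codes with named constants. Below is a hypothetical sketch of those constants, with values inferred from the literals 1 (case/control) and 2 (trio) used in the surrounding EM code; the real Haploview Options class may declare them differently.

// Hypothetical sketch, not the actual Haploview Options class.
// Constant values are inferred from the magic numbers they replace above.
public class Options {
    public static final int ASSOC_CC   = 1; // case/control association test
    public static final int ASSOC_TRIO = 2; // trio (TDT) association test

    private static int assocTest = ASSOC_CC; // assumed default for this sketch

    public static int getAssocTest() {
        return assocTest;
    }
}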
parent = new SQLTable();
protected void setUp() throws Exception { parent.addChild(child); }
public void run(Context context, XMLOutput output) throws Exception {
public void run(JellyContext context, XMLOutput output) throws Exception {
public void run(Context context, XMLOutput output) throws Exception { getBody().run( context, output ); }
double genoPct = 100.0*(het+hom)/(het+hom+missing); return genoPct;
if (het+hom+missing == 0){ return 0; }else{ return 100.0*(het+hom)/(het+hom+missing); }
private double getGenoPercent(int het, int hom, int missing){ double genoPct = 100.0*(het+hom)/(het+hom+missing); return genoPct; }
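The pair above adds a zero-denominator guard to getGenoPercent. A minimal standalone sketch (hypothetical class name, not Haploview code) shows why the guard matters: with het, hom and missing all zero, the old formula evaluates 0.0/0 and returns NaN instead of 0.

// Standalone demo contrasting the old and new genotyping-percentage formulas.
public class GenoPercentDemo {
    static double oldGenoPercent(int het, int hom, int missing) {
        return 100.0 * (het + hom) / (het + hom + missing); // NaN when all counts are 0
    }

    static double newGenoPercent(int het, int hom, int missing) {
        if (het + hom + missing == 0) {
            return 0;
        }
        return 100.0 * (het + hom) / (het + hom + missing);
    }

    public static void main(String[] args) {
        System.out.println(oldGenoPercent(0, 0, 0)); // prints NaN
        System.out.println(newGenoPercent(0, 0, 0)); // prints 0.0
    }
}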
clickY-botEdgeShift,
clickY-botEdgeShift+smallDataVertSlop,
public void mousePressed (MouseEvent e) { Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); //if users right clicks & holds, pop up the info if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ Graphics g = getGraphics(); g.setFont(boxFont); FontMetrics metrics = g.getFontMetrics(); PairwiseLinkage[][] dPrimeTable = theData.filteredDPrimeTable; final int clickX = e.getX(); final int clickY = e.getY(); double dboxX = (double)(clickX - clickXShift - (clickY-clickYShift))/boxSize; double dboxY = (double)(clickX - clickXShift + (clickY-clickYShift))/boxSize; final int boxX, boxY; if (dboxX < 0){ boxX = (int)(dboxX - 0.5); } else{ boxX = (int)(dboxX + 0.5); } if (dboxY < 0){ boxY = (int)(dboxY - 0.5); }else{ boxY = (int)(dboxY + 0.5); } if ((boxX >= lowX && boxX <= highX) && (boxY > boxX && boxY < highY) && !(worldmapRect.contains(clickX,clickY))){ if (dPrimeTable[boxX][boxY] != null){ displayStrings = new String[5]; if (theData.infoKnown){ displayStrings[0] = new String ("(" +Chromosome.getFilteredMarker(boxX).getName() + ", " + Chromosome.getFilteredMarker(boxY).getName() + ")"); }else{ displayStrings[0] = new String("(" + (Chromosome.realIndex[boxX]+1) + ", " + (Chromosome.realIndex[boxY]+1) + ")"); } displayStrings[1] = new String ("D': " + dPrimeTable[boxX][boxY].getDPrime()); displayStrings[2] = new String ("LOD: " + dPrimeTable[boxX][boxY].getLOD()); displayStrings[3] = new String ("r^2: " + dPrimeTable[boxX][boxY].getRSquared()); displayStrings[4] = new String ("D' conf. bounds: " + dPrimeTable[boxX][boxY].getConfidenceLow() + "-" + dPrimeTable[boxX][boxY].getConfidenceHigh()); popupExists = true; } } else if (blockselector.contains(clickX, clickY)){ int marker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); displayStrings = new String[2]; if (theData.infoKnown){ displayStrings[0] = new String (Chromosome.getFilteredMarker(marker).getName()); }else{ displayStrings[0] = new String("Marker " + (Chromosome.realIndex[marker]+1)); } displayStrings[1] = new String ("MAF: " + Chromosome.getFilteredMarker(marker).getMAF()); popupExists = true; } if (popupExists){ int strlen = 0; for (int x = 0; x < displayStrings.length; x++){ if (strlen < metrics.stringWidth(displayStrings[x])){ strlen = metrics.stringWidth(displayStrings[x]); } } //edge shifts prevent window from popping up partially offscreen int visRightBound = (int)(getVisibleRect().getWidth() + getVisibleRect().getX()); int visBotBound = (int)(getVisibleRect().getHeight() + getVisibleRect().getY()); int rightEdgeShift = 0; if (clickX + strlen + popupLeftMargin +5 > visRightBound){ rightEdgeShift = clickX + strlen + popupLeftMargin + 10 - visRightBound; } int botEdgeShift = 0; if (clickY + 5*metrics.getHeight()+10 > visBotBound){ botEdgeShift = clickY + 5*metrics.getHeight()+15 - visBotBound; } popupDrawRect = new Rectangle(clickX-rightEdgeShift, clickY-botEdgeShift, strlen+popupLeftMargin+5, displayStrings.length*metrics.getHeight()+10); repaint(); } }else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ int x = e.getX(); int y = e.getY(); if (blockselector.contains(x,y)){ setCursor(Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)); blockStartX = x; } } }
public void run(Context context, XMLOutput output) throws Exception {
public void run(JellyContext context, XMLOutput output) throws Exception {
public void run(Context context, XMLOutput output) throws Exception { DynaTag dynaTag = (DynaTag) tag; // ### probably compiling this to 2 arrays might be quicker and smaller for ( Iterator iter = attributes.entrySet().iterator(); iter.hasNext(); ) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Expression expression = (Expression) entry.getValue(); Object value = expression.evaluate( context ); dynaTag.setAttribute( name, value ); } tag.run( context, output ); }
mouseMode = mouseModeType.SELECT_TABLE;
mouseMode = MouseModeType.SELECT_TABLE;
protected void cleanup() { if(addToPP) { pp.unzoomPoint(p); logger.debug("Placing table at: "+p); pp.addImpl(tp,p,pp.getPPComponentCount()); try { pp.db.addChild(tp.getModel()); pp.selectNone(); tp.setSelected(true); mouseMode = mouseModeType.SELECT_TABLE; } catch (ArchitectException e) { logger.error("Couldn't add table \""+tp.getModel()+"\" to play pen:", e); JOptionPane.showMessageDialog(null, "Failed to add table:\n"+e.getMessage()); return; } } else { tp.setMoving(false); } pp.setCursor(null); pp.removeMouseMotionListener(this); pp.removeMouseListener(this); pp.revalidate(); }
mouseMode = mouseModeType.RUBBERBAND_MOVE;
mouseMode = MouseModeType.RUBBERBAND_MOVE;
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(evt.getPoint()); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); mouseMode = mouseModeType.RUBBERBAND_MOVE; // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { PlayPenComponent c = contentPane.getComponent(i); if (c instanceof Relationship) { // relationship is non-rectangular so we can't use getBounds for intersection testing ((Relationship) c).setSelected(((Relationship) c).intersects(rubberBand)); } else if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); repaintRubberBandRegion(dirtyRegion); } }
if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else {
if ( mouseMode == MouseModeType.CREATING_RELATIONSHIP ) { } else {
public void mousePressed(MouseEvent evt) { requestFocus(); maybeShowPopup(evt); Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if (c instanceof Relationship) { Relationship r = (Relationship) c; PlayPen pp = (PlayPen) r.getPlayPen(); if ( mouseMode == mouseModeType.CREATING_RELATIONSHIP ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) != 0) { mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP; if ( !r.isSelected() ) { pp.selectNone(); } } } r.setSelected(true); // moving pk/fk decoration boolean overPkDec = ((RelationshipUI) r.getUI()).isOverPkDecoration(p); if (overPkDec || ((RelationshipUI) r.getUI()).isOverFkDecoration(p)) { new RelationshipDecorationMover(r, overPkDec); } } else if (c instanceof TablePane) { evt.getComponent().requestFocus(); TablePane tp = (TablePane) c; PlayPen pp = (PlayPen) tp.getPlayPen(); try { int clickCol = tp.pointToColumnIndex(p); if ( mouseMode == mouseModeType.CREATING_TABLE ) { } else { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE; pp.selectNone(); } } else { mouseMode = mouseModeType.MULTI_SELECT; } if ( clickCol > TablePane.COLUMN_INDEX_TITLE && clickCol < tp.getModel().getColumns().size()) { if ( (evt.getModifiersEx() & (InputEvent.SHIFT_DOWN_MASK | InputEvent.CTRL_DOWN_MASK)) == 0) { if ( !tp.isColumnSelected(clickCol) ){ tp.deSelectEverythingElse(evt); tp.selectNone(); } mouseMode = mouseModeType.SELECT_COLUMN; } tp.selectColumn(clickCol); tp.fireSelectionEvent(new SelectionEvent(tp, SelectionEvent.SELECTION_EVENT)); tp.repaint(); } tp.setSelected(true); } if (clickCol == TablePane.COLUMN_INDEX_TITLE && !ArchitectFrame.getMainInstance().createRelationshipIsActive()) { Iterator it = pp.getSelectedTables().iterator(); logger.debug("event point: " + p); logger.debug("zoomed event point: " + pp.zoomPoint(new Point(p))); draggingTablePanes = true; while (it.hasNext()) { // create FloatingTableListener for each selected item TablePane t3 = (TablePane)it.next(); logger.debug("(" + t3.getModel().getName() + ") zoomed selected table point: " + t3.getLocationOnScreen()); logger.debug("(" + t3.getModel().getName() + ") unzoomed selected table point: " + pp.unzoomPoint(t3.getLocationOnScreen())); /* the floating table listener expects zoomed handles which are relative to the location of the table column which was clicked on. */ Point clickedColumn = tp.getLocationOnScreen(); Point otherTable = t3.getLocationOnScreen(); Point handle = pp.zoomPoint(new Point(p)); logger.debug("(" + t3.getModel().getName() + ") translation x=" + (otherTable.getX() - clickedColumn.getX()) + ",y=" + (otherTable.getY() - clickedColumn.getY())); handle.translate((int)(clickedColumn.getX() - otherTable.getX()), (int) (clickedColumn.getY() - otherTable.getY())); new PlayPen.FloatingTableListener(pp, t3, handle,false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } else { if ((evt.getModifiersEx() & InputEvent.BUTTON1_DOWN_MASK) != 0 && !evt.isPopupTrigger()) { mouseMode = mouseModeType.IDLE; selectNone(); rubberBandOrigin = new Point(p); rubberBand = new Rectangle(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); } } }
mouseMode = mouseModeType.MULTI_SELECT; } else { mouseMode = mouseModeType.SELECT_RELATIONSHIP;
mouseMode = MouseModeType.MULTI_SELECT; } else { mouseMode = MouseModeType.SELECT_RELATIONSHIP;
if ( mouseMode == mouseModeType.CREATING_TABLE ) {
if ( mouseMode == MouseModeType.CREATING_TABLE ) {
if ( !tp.isSelected() || mouseMode == mouseModeType.IDLE ) { mouseMode = mouseModeType.SELECT_TABLE;
if ( !tp.isSelected() || mouseMode == MouseModeType.IDLE ) { mouseMode = MouseModeType.SELECT_TABLE;
mouseMode = mouseModeType.MULTI_SELECT;
mouseMode = MouseModeType.MULTI_SELECT;
mouseMode = mouseModeType.SELECT_COLUMN;
mouseMode = MouseModeType.SELECT_COLUMN;
mouseMode = mouseModeType.IDLE;
mouseMode = MouseModeType.IDLE;
mouseMode = mouseModeType.MULTI_SELECT;
mouseMode = MouseModeType.MULTI_SELECT;
public void mouseReleased(MouseEvent evt) { draggingTablePanes = false; if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; zoomRect(dirtyRegion); repaintRubberBandRegion(dirtyRegion); if ( getSelectedItems().size() > 0 ) mouseMode = mouseModeType.MULTI_SELECT; else mouseMode = mouseModeType.IDLE; } } maybeShowPopup(evt); }
mouseMode = mouseModeType.IDLE;
mouseMode = MouseModeType.IDLE;
mouseMode = MouseModeType.MULTI_SELECT;
public void selectAll() { for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { if (contentPane.getComponent(i) instanceof Selectable) { Selectable s = (Selectable) contentPane.getComponent(i); s.setSelected(true); } } }
public static void setMouseMode(mouseModeType mouseMode) {
public static void setMouseMode(MouseModeType mouseMode) {
public static void setMouseMode(mouseModeType mouseMode) { PlayPen.mouseMode = mouseMode; }
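The preceding rows rename the play pen's mouse-mode type from mouseModeType to MouseModeType, matching Java's type-naming convention. A hypothetical sketch of the enum implied by the constants referenced above (the real declaration in PlayPen may contain additional members):

// Hypothetical enum sketch; members are the constants used in the rows above.
public enum MouseModeType {
    IDLE,
    MULTI_SELECT,
    SELECT_TABLE,
    SELECT_RELATIONSHIP,
    SELECT_COLUMN,
    CREATING_TABLE,
    CREATING_RELATIONSHIP,
    RUBBERBAND_MOVE
}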
noImage = true;
public void loadMarkers(){ markersLoaded = true; repaint(); }
System.out.println(System.currentTimeMillis());
public void paintComponent(Graphics g){ Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); Rectangle visRect = getVisibleRect(); /* boxSize = ((clipRect.width-2*H_BORDER)/dPrimeTable.length-1); if (boxSize < 12){boxSize=12;} if (boxSize < 25){ printDetails = false; boxRadius = boxSize/2; }else{ boxRadius = boxSize/2 - 1; } */ //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(markersLoaded)){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { g2.translate((size.width - pref.width) / 2, 0); clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } FontMetrics boxFontMetrics = g.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; left = H_BORDER; top = V_BORDER; FontMetrics metrics; int ascent; g2.setColor(this.getBackground()); g2.fillRect(0,0,pref.width,pref.height); g2.setColor(Color.BLACK); if (markersLoaded) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations BasicStroke thickerStroke = new BasicStroke(1); BasicStroke thinnerStroke = new BasicStroke(0.25f); int wide = (dPrimeTable.length-1) * boxSize; //TODO: talk to kirby about locusview scaling gizmo int lineLeft = wide/20; int lineSpan = (wide/10)*9; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.size()-1).getPosition(); double spanpos = maxpos - minpos; g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fillRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); g2.setColor(Color.black); g2.drawRect(left + lineLeft, 5, lineSpan, TICK_HEIGHT); for (int i = 0; i < Chromosome.size(); i++) { double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; int xx = (int) (left + lineLeft + lineSpan*pos); g2.setStroke(thickerStroke); g.drawLine(xx, 5, xx, 5 + TICK_HEIGHT); g2.setStroke(thinnerStroke); g.drawLine(xx, 5 + TICK_HEIGHT, left + i*boxSize, TICK_BOTTOM); } top += TICK_BOTTOM; //// draw the marker names if (printDetails){ g.setFont(markerNameFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getName()); for (int x = 1; x < dPrimeTable.length; x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } //System.out.println(widest); g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); for (int x = 0; x < dPrimeTable.length; x++) { g2.drawString(Chromosome.getMarker(x).getName(),TEXT_NUMBER_GAP, x*boxSize + ascent/3); } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_NUMBER_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } //// draw the marker numbers if (printDetails){ g.setFont(markerNumFont); metrics = g.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < dPrimeTable.length; x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g.drawString(mark, left + 
x*boxSize - metrics.stringWidth(mark)/2, top + ascent); } top += boxRadius/2; // give a little space between numbers and boxes } //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = (visRect.x-clickXShift-(visRect.y + visRect.height-clickYShift))/boxSize; if (lowX < 0) { lowX = 0; } highX = ((visRect.x + visRect.width)/boxSize)+1; if (highX > dPrimeTable.length-1){ highX = dPrimeTable.length-1; } lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize; if (lowY < lowX+1){ lowY = lowX+1; } highY = (((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height))/boxSize)+1; if (highY > dPrimeTable.length){ highY = dPrimeTable.length; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable[x][y] == null){ continue; } //TODO:if you load data then info it doesn't handle selective drawing correctly double d = dPrimeTable[x][y].getDPrime(); //double l = dPrimeTable[x][y].getLOD(); Color boxColor = dPrimeTable[x][y].getColor(); // draw markers above int xx = left + (x + y) * boxSize / 2; int yy = top + (y - x) * boxSize / 2; diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g.setColor(boxColor); g.fillPolygon(diamond); if (boxColor == Color.white) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g.setColor(Color.lightGray); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } if(printDetails){ g.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val = (int) (d * 100); g.setColor((val < 50) ? 
Color.gray : Color.black); if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } if (pref.getWidth() > visRect.width){ if (noImage){ //first time through draw a worldmap if dataset is big: final int WM_MAX_WIDTH = 300; double scalefactor; scalefactor = (double)(chartSize.width)/WM_MAX_WIDTH; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(this.getBackground()); gw2.fillRect(1,1,worldmap.getWidth()-2,worldmap.getHeight()-2); //make a pretty border gw2.setColor(Color.BLACK); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth()-1,worldmap.getHeight()-1); ir = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth()-1, worldmap.getHeight()-1); double prefBoxSize = boxSize/scalefactor; float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; System.out.println(System.currentTimeMillis()); for (int x = 0; x < dPrimeTable.length-1; x++){ for (int y = x+1; y < dPrimeTable.length; y++){ if (dPrimeTable[x][y] == null){ continue; } double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left; double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable[x][y].getColor()); gw2.fill(gp); } } System.out.println(System.currentTimeMillis()); noImage = false; } paintWorldMap(g); } }
textData = new HaploData();
textData = new HaploData(0);
private void processFile(String fileName,int fileType,String infoFileName){ try { int outputType; long maxDistance; long negMaxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; negMaxDistance = -maxDistance; outputType = this.arg_output; textData = new HaploData(); Vector result = null; if(fileType == 0){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == 1) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, arg_skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,arg_skipCheck); } File infoFile; if(infoFileName.equals("")) { infoFile = null; }else{ infoFile = new File(infoFileName); } textData.prepareMarkerInput(infoFile,maxDistance,textData.getPedFile().getHMInfo()); if(!arg_quiet && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } textData.infoKnown = true; if(this.arg_showCheck && result != null) { System.out.println("Data check results:\n" + "Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } if(this.arg_check && result != null){ OutputFile = new File (fileName + ".CHECK"); FileWriter saveCheckWriter = new FileWriter(OutputFile); saveCheckWriter.write("Name\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr\n"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); saveCheckWriter.write( Chromosome.getMarker(i).getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum() +"\n"); } saveCheckWriter.close(); } if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new File(fileName + ".SPINEblocks"); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL int start = 0; int stop = Chromosome.getFilteredSize(); if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + 
".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.filteredDPrimeTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getFilteredSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getFilteredSize()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler);
final LoginContext loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler);
public void login(ServiceContext context, String username, String password) throws ServiceException{ LoginCallbackHandler callbackHandler = new LoginCallbackHandler(username, password); User user = null; UserManager userManager = UserManager.getInstance(); UserActivityLogger logger = UserActivityLogger.getInstance(); try{ loginContext = new LoginContext(AuthConstants.AUTH_CONFIG_INDEX, callbackHandler); loginContext.login(); /* Need this for external login modules, user is really authenticated after this step */ Set principals = loginContext.getSubject().getPrincipals(); Object obj = null; for(Iterator principalIt = principals.iterator(); principalIt.hasNext();){ if((obj = principalIt.next()) instanceof User){ user = (User)obj; break; } } /* Successful login: - Add new users authenticated through external LoginModules. - Update the lock count and status of existing users */ if(user == null){ user = new User(); user.setUsername(username); user.setExternalUser(true); List roles = new ArrayList(); roles.add(new Role(org.jmanage.core.auth.ExternalUserRolesConfig.getInstance().getUserRole(username))); user.setRoles(roles); }else{ user = userManager.getUser(user.getName()); user.setLockCount(0); user.setStatus(null); userManager.updateUser(user); } /* set Subject in session */ context._setUser(user); logger.logActivity(user.getName(), "logged in successfully"); }catch(LoginException lex){ user = userManager.getUser(username); String errorCode = ErrorCodes.UNKNOWN_ERROR; Object[] values = null; /* Conditionalize the error message */ if(user == null){ errorCode = ErrorCodes.INVALID_CREDENTIALS; }else if(User.STATUS_LOCKED.equals(user.getStatus())){ errorCode = ErrorCodes.ACCOUNT_LOCKED; }else if(user.getLockCount() < MAX_LOGIN_ATTEMPTS_ALLOWED){ int thisAttempt = user.getLockCount()+1; user.setLockCount(thisAttempt); if(thisAttempt == MAX_LOGIN_ATTEMPTS_ALLOWED){ user.setStatus(User.STATUS_LOCKED); userManager.updateUser(user); errorCode = ErrorCodes.ACCOUNT_LOCKED; }else{ userManager.updateUser(user); errorCode = ErrorCodes.INVALID_LOGIN_ATTEMPTS; values = new Object[]{ String.valueOf(MAX_LOGIN_ATTEMPTS_ALLOWED - thisAttempt)}; } } if(user != null) logger.logActivity(username, user.getName()+" failed to login"); throw new ServiceException(errorCode, values); } }
try{ loginContext.logout(); UserActivityLogger.getInstance().logActivity(user.getName(), "logged out successfully"); }catch(LoginException lex){ throw new ServiceException(ErrorCodes.UNKNOWN_ERROR); }
UserActivityLogger.getInstance().logActivity(user.getName(), "logged out successfully");
public void logout(ServiceContext context, User user)throws ServiceException{ try{ loginContext.logout(); UserActivityLogger.getInstance().logActivity(user.getName(), "logged out successfully"); }catch(LoginException lex){ throw new ServiceException(ErrorCodes.UNKNOWN_ERROR); } }
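The login/logout changes above follow the standard JAAS pattern: build a LoginContext from a configuration entry name and a CallbackHandler, call login(), then read the authenticated principals off the Subject, and later call logout(). A minimal sketch of that pattern using only the javax.security.auth API; the entry name "SampleLogin" and the hard-coded credentials are placeholders, and login() only succeeds if a matching JAAS login configuration is installed.

import java.security.Principal;
import javax.security.auth.callback.*;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;

public class JaasLoginSketch {
    public static void main(String[] args) throws LoginException {
        // Supplies the username and password when a LoginModule asks for them.
        CallbackHandler handler = callbacks -> {
            for (Callback cb : callbacks) {
                if (cb instanceof NameCallback) {
                    ((NameCallback) cb).setName("demo");                      // placeholder user
                } else if (cb instanceof PasswordCallback) {
                    ((PasswordCallback) cb).setPassword("secret".toCharArray()); // placeholder password
                } else {
                    throw new UnsupportedCallbackException(cb);
                }
            }
        };
        // "SampleLogin" is a hypothetical entry name in the JAAS configuration file.
        LoginContext lc = new LoginContext("SampleLogin", handler);
        lc.login();                                    // throws LoginException on bad credentials
        for (Principal p : lc.getSubject().getPrincipals()) {
            System.out.println("principal: " + p.getName());
        }
        lc.logout();                                   // invalidates the authenticated Subject
    }
}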
Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width;
if (tp == relationship.getPkTable()) { if ((orientation & PARENT_FACES_LEFT) != 0) ep = new Point(0, Math.max(0, Math.min(tpsize.height, p.y))); else if ((orientation & PARENT_FACES_RIGHT) != 0) ep = new Point(tpsize.width, Math.max(0, Math.min(tpsize.height, p.y))); else if ((orientation & PARENT_FACES_TOP) != 0) ep = new Point(Math.max(0, Math.min(tpsize.width, p.x)), 0); else if ((orientation & PARENT_FACES_BOTTOM) != 0) ep = new Point(Math.max(0, Math.min(tpsize.width, p.x)), tpsize.height); else ep = new Point(p); } else if (tp == relationship.getFkTable()) { if ((orientation & CHILD_FACES_LEFT) != 0) ep = new Point(0, Math.max(0, Math.min(tpsize.height, p.y))); else if ((orientation & CHILD_FACES_RIGHT) != 0) ep = new Point(tpsize.width, Math.max(0, Math.min(tpsize.height, p.y))); else if ((orientation & CHILD_FACES_TOP) != 0) ep = new Point(Math.max(0, Math.min(tpsize.width, p.x)), 0); else if ((orientation & CHILD_FACES_BOTTOM) != 0) ep = new Point(Math.max(0, Math.min(tpsize.width, p.x)), tpsize.height); else ep = new Point(p); } else {
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width;
ep = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = ep.y != 0 && ep.y != tpsize.height; boolean adjustY = ep.x != 0 && ep.x != tpsize.width; if (adjustX) { if (ep.x < (tpsize.width/2)) { ep.x = 0; } else { ep.x = tpsize.width; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
} if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height;
if (adjustY) { if (ep.y < (tpsize.height/2)) { ep.y = 0; } else { ep.y = tpsize.height; }
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
return bcp;
return ep;
public Point closestEdgePoint(TablePane tp, Point p) { Dimension tpsize = tp.getSize(); // clip point p to inside of tp Point bcp = new Point(Math.max(0, Math.min(tpsize.width, p.x)), Math.max(0, Math.min(tpsize.height, p.y))); boolean adjustX = bcp.y != 0 && bcp.y != tpsize.height; boolean adjustY = bcp.x != 0 && bcp.x != tpsize.width; // push x-coordinate to left or right edge of tp, if y-coord is inside tp if (adjustX) { if (bcp.x < (tpsize.width/2)) { bcp.x = 0; } else { bcp.x = tpsize.width; } } // push y-coordinate to top or bottom edge of tp, if x-coord is inside tp if (adjustY) { if (bcp.y < (tpsize.height/2)) { bcp.y = 0; } else { bcp.y = tpsize.height; } } return bcp; }
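The closestEdgePoint rework above first clips the incoming point into the table pane and then pushes each coordinate that is still strictly inside toward the nearer edge. A standalone sketch of that clamp-then-snap step, using plain java.awt types and names of my own.

import java.awt.Dimension;
import java.awt.Point;

public class EdgeSnapSketch {
    // Clamp p into the box (0,0)-(size.width,size.height), then push any coordinate
    // that is still strictly inside the box toward the nearer edge on that axis.
    static Point closestEdgePoint(Dimension size, Point p) {
        Point cp = new Point(Math.max(0, Math.min(size.width, p.x)),
                             Math.max(0, Math.min(size.height, p.y)));
        boolean adjustX = cp.y != 0 && cp.y != size.height;
        boolean adjustY = cp.x != 0 && cp.x != size.width;
        if (adjustX) cp.x = (cp.x < size.width / 2) ? 0 : size.width;
        if (adjustY) cp.y = (cp.y < size.height / 2) ? 0 : size.height;
        return cp;
    }

    public static void main(String[] args) {
        // An interior point gets both coordinates pushed, ending on the nearest corner.
        System.out.println(closestEdgePoint(new Dimension(100, 60), new Point(30, 20)));
        // prints java.awt.Point[x=0,y=0]
    }
}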
this.getRootPane().setDefaultButton(okButton);
void load(int ft){ fileType = ft; JPanel contents = new JPanel(); contents.setLayout(new BoxLayout(contents, BoxLayout.Y_AXIS)); JPanel filePanel = new JPanel(); filePanel.setLayout(new BoxLayout(filePanel, BoxLayout.Y_AXIS)); JPanel topFilePanel = new JPanel(); JPanel botFilePanel = new JPanel(); genoFileField = new JTextField("",20); infoFileField = new JTextField("",20); JButton browseGenoButton = new JButton("Browse"); browseGenoButton.setActionCommand(BROWSE_GENO); browseGenoButton.addActionListener(this); JButton browseInfoButton = new JButton("Browse"); browseInfoButton.setActionCommand(BROWSE_INFO); browseInfoButton.addActionListener(this); topFilePanel.add(new JLabel("Genotype file: ")); topFilePanel.add(genoFileField); topFilePanel.add(browseGenoButton); botFilePanel.add(new JLabel("Locus information file: ")); botFilePanel.add(infoFileField); botFilePanel.add(browseInfoButton); filePanel.add(topFilePanel); filePanel.add(botFilePanel); filePanel.setBorder(BorderFactory.createEmptyBorder(10,10,10,10)); contents.add(filePanel); JPanel prefsPanel = new JPanel(); maxComparisonDistField = new NumberTextField("200",4); prefsPanel.add(new JLabel("Ignore pairwise comparisons of markers >")); prefsPanel.add(maxComparisonDistField); prefsPanel.add(new JLabel("kb apart.")); contents.add(prefsPanel); JPanel choicePanel = new JPanel(); JButton okButton = new JButton("OK"); okButton.addActionListener(this); JButton cancelButton = new JButton("Cancel"); cancelButton.addActionListener(this); choicePanel.add(okButton); choicePanel.add(cancelButton); contents.add(choicePanel); this.setContentPane(contents); this.pack(); }
if ( selected != null ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter name for new folder", "New folder", JOptionPane.PLAIN_MESSAGE, null, null, "New folder" ); if ( newName != null ) { PhotoFolder newFolder = PhotoFolder.create( newName, selected ); } }
if ( selected != null ) { boolean ready = false; while ( !ready ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter name for new folder", "New folder", JOptionPane.PLAIN_MESSAGE, null, null, "New folder" ); if ( newName != null ) { if ( newName.length() > PhotoFolder.NAME_LENGTH ) { JOptionPane.showMessageDialog( this, "Folder name cannot be longer than " + PhotoFolder.NAME_LENGTH + " characters", "Too long name", JOptionPane.ERROR_MESSAGE, null ); } else { PhotoFolder newFolder = PhotoFolder.create( newName, selected ); ready = true; } } else { ready = true; } } }
void createNewFolder() { if ( selected != null ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter name for new folder", "New folder", JOptionPane.PLAIN_MESSAGE, null, null, "New folder" ); if ( newName != null ) { PhotoFolder newFolder = PhotoFolder.create( newName, selected ); } } }
String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; f.setName( newName ); log.debug( "Changed name to " + newName ); } }
boolean ready = false; while ( !ready ) { String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; try { f.setName( newName ); ready = true; } catch ( IllegalArgumentException e ) { JOptionPane.showMessageDialog( this, "Folder name cannot be longer than " + PhotoFolder.NAME_LENGTH + " characters", "Too long name", JOptionPane.ERROR_MESSAGE, null ); } log.debug( "Changed name to " + newName ); } else { ready = true; } } }
void renameSelectedFolder() { if ( selected != null ) { String origName = selected.getName(); String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; f.setName( newName ); log.debug( "Changed name to " + newName ); } } }
markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf));
markerInfo.add(new SNP(String.valueOf(i), (i*3000), maf));
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } //} } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ //if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = 
kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } //} } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; //set up the indexing to take into account skipped markers. Need //to loop through twice because first time we just count number of //unskipped markers int count = 0; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < numMarkers; i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } //fake the marker info for now Vector markerInfo = new Vector(); for (int i = 0; i < numMarkers; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; Chromosome.markers = markerInfo.toArray(); //return chrom; }
markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf));
markerInfo.add(new SNP(String.valueOf(i), (i*3000), maf));
void prepareHapsInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); Vector markerInfo = new Vector(); Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).unfilteredElementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; Chromosome.markers = markerInfo.toArray(); //return chroms; }
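Both input readers above derive a per-marker minor allele frequency by scanning one column of genotypes: 0 means missing, 5 is an unresolved heterozygote counted half toward each allele, and the result is flipped so the rarer allele's frequency is reported. A minimal sketch of that counting rule on a plain byte array; the names are my own.

public class MafSketch {
    // alleles: 0 = missing, 5 = unresolved heterozygote, anything else = an allele code.
    static double minorAlleleFreq(byte[] alleles) {
        byte a1 = 0;
        double numA1 = 0, numA2 = 0;
        for (byte a : alleles) {
            if (a == 0) continue;                         // missing genotype, skip
            if (a == 5) { numA1 += 0.5; numA2 += 0.5; }   // het: half a count to each allele
            else if (a1 == 0) { a1 = a; numA1++; }        // first allele seen defines "allele 1"
            else if (a == a1) numA1++;
            else numA2++;
        }
        double maf = numA1 / (numA1 + numA2);
        return (maf > 0.5) ? 1.0 - maf : maf;             // report the rarer allele's frequency
    }

    public static void main(String[] args) {
        byte[] column = {1, 1, 2, 5, 0, 1};               // one marker across six chromosomes
        System.out.println(minorAlleleFreq(column));      // 1.5 of 5 counted alleles -> 0.3
    }
}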
int prepareMarkerInput(File infile) throws IOException{
int prepareMarkerInput(File infile, long maxdist) throws IOException{
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (Chromosome.markers.length == markers.size()){ Chromosome.markers = markers.toArray(); markersLoaded = true; return 1; }else{ return -1; } }
for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } }
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (Chromosome.markers.length == markers.size()){ Chromosome.markers = markers.toArray(); markersLoaded = true; return 1; }else{ return -1; } }
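The maxdist parameter added to prepareMarkerInput above feeds a filter that nulls out pairwise entries for markers separated by more than the comparison limit. A self-contained sketch of that distance filter over an upper-triangular table, with made-up positions and a placeholder value standing in for the computed D'.

public class DistanceFilterSketch {
    public static void main(String[] args) {
        long[] positions = {1000, 5000, 250000, 260000};   // hypothetical marker positions in bp
        long maxDist = 200000;                              // e.g. a 200 kb comparison limit
        Double[][] dPrime = new Double[positions.length][positions.length];

        // Fill the upper triangle, leaving null any pair separated by more than maxDist.
        for (int pos2 = 1; pos2 < positions.length; pos2++) {
            for (int pos1 = 0; pos1 < pos2; pos1++) {
                long sep = Math.abs(positions[pos2] - positions[pos1]);
                if (maxDist > 0 && sep > maxDist) {
                    dPrime[pos1][pos2] = null;              // too far apart: skip this comparison
                } else {
                    dPrime[pos1][pos2] = 1.0;               // placeholder for a computed D' value
                }
            }
        }
        System.out.println("pair (0,2) computed? " + (dPrime[0][2] != null)); // false: 249 kb apart
    }
}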
public CreateRelationshipAction(PlayPen pp) {
public CreateRelationshipAction() {
public CreateRelationshipAction(PlayPen pp) { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
this.pp = pp; pp.addSelectionListener(this);
setEnabled(false);
public CreateRelationshipAction(PlayPen pp) { super("Create Relationship"); this.pp = pp; pp.addSelectionListener(this); }
setShoresMixedCaseIdentifier(dbmd.storesMixedCaseIdentifiers()); setShoresUpperCaseIdentifier(dbmd.storesUpperCaseIdentifiers()); setShoresLowerCaseIdentifier(dbmd.storesLowerCaseIdentifiers());
public synchronized void populate() throws ArchitectException { if (populated) return; int oldSize = children.size(); logger.debug("SQLDatabase: populate starting"); Connection con = null; ResultSet rs = null; try { con = getConnection(); DatabaseMetaData dbmd = con.getMetaData(); rs = dbmd.getCatalogs(); while (rs.next()) { String catName = rs.getString(1); SQLCatalog cat = null; if (catName != null) { cat = new SQLCatalog(this, catName); cat.setNativeTerm(dbmd.getCatalogTerm()); logger.debug("Set catalog term to "+cat.getNativeTerm()); children.add(cat); } } rs.close(); rs = null; // if we tried to get Catalogs, and there were none, then I guess // we should look for Schemas instead (i.e. this database has no // catalogs, and schemas attached directly to the database) if ( children.size() == oldSize ) { rs = dbmd.getSchemas(); while (rs.next()) { children.add(new SQLSchema(this, rs.getString(1),false)); } rs.close(); rs = null; } } catch (SQLException e) { throw new ArchitectException("database.populate.fail", e); } finally { populated = true; int newSize = children.size(); if (newSize > oldSize) { int[] changedIndices = new int[newSize - oldSize]; for (int i = 0, n = newSize - oldSize; i < n; i++) { changedIndices[i] = oldSize + i; } fireDbChildrenInserted(changedIndices, children.subList(oldSize, newSize)); } try { if ( rs != null ) rs.close(); } catch (SQLException e2) { throw new ArchitectException("database.rs.close.fail", e2); } } logger.debug("SQLDatabase: populate finished"); }
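For reference alongside the populate() change, java.sql.DatabaseMetaData exposes separate queries for mixed-, upper- and lower-case identifier storage, so each setter should read its own flag rather than reusing storesMixedCaseIdentifiers(). A small sketch; the JDBC URL is a placeholder and assumes a suitable driver (here H2) is on the classpath.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.SQLException;

public class IdentifierCaseSketch {
    public static void main(String[] args) throws SQLException {
        // "jdbc:h2:mem:demo" is a placeholder; any reachable JDBC data source will do.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            DatabaseMetaData dbmd = con.getMetaData();
            System.out.println("mixed case: " + dbmd.storesMixedCaseIdentifiers());
            System.out.println("upper case: " + dbmd.storesUpperCaseIdentifiers());
            System.out.println("lower case: " + dbmd.storesLowerCaseIdentifiers());
        }
    }
}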
EM(){
EM(Vector chromosomes, int numTrios){
EM(){ //an old-school speedup courtesy of mjdaly two_n[0]=1; for (int i=1; i<31; i++){ two_n[i]=2*two_n[i-1]; } }
this.chromosomes = chromosomes; this.numTrios = numTrios;
EM(){ //an old-school speedup courtesy of mjdaly two_n[0]=1; for (int i=1; i<31; i++){ two_n[i]=2*two_n[i-1]; } }
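The EM constructor above precomputes two_n[i] = 2^i so that a block of n biallelic markers can be treated as 2^n candidate haplotypes, each encodable as a small integer. A tiny sketch of the lookup table and one possible bit-style encoding; the encoding direction is my own choice for illustration.

public class TwoNSketch {
    public static void main(String[] args) {
        // Precompute 2^i once, as in the EM constructor, instead of recomputing powers later.
        int[] twoN = new int[31];
        twoN[0] = 1;
        for (int i = 1; i < 31; i++) {
            twoN[i] = 2 * twoN[i - 1];
        }

        // A block of k biallelic markers has twoN[k] possible haplotypes; each haplotype
        // can be encoded as an integer by treating the allele choices as bits.
        int[] alleles = {1, 0, 1};                            // hypothetical 3-marker haplotype
        int code = 0;
        for (int i = 0; i < alleles.length; i++) {
            code += alleles[i] * twoN[alleles.length - 1 - i];
        }
        System.out.println("possible haplotypes: " + twoN[alleles.length]); // 8
        System.out.println("encoded haplotype:   " + code);                 // binary 101 -> 5
    }
}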
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{
private void full_em_breakup( byte[][] input_haplos, int[] block_size, int numTrios, Vector affStatus) throws HaploViewException{
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each 
block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all 
possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
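The core of full_em_breakup above is a classic EM loop over ambiguous phasings: haplotype frequencies start at a pseudocount plus the phase-known counts, each individual's candidate (h1, h2) pairs are weighted by the product of the current frequencies, and the frequencies are re-estimated from those normalized weights. A toy, self-contained version of just that loop, with two individuals and a data layout of my own.

import java.util.Arrays;

public class HaploEmSketch {
    public static void main(String[] args) {
        final double PSEUDOCOUNT = 0.1;
        int numPoss = 4;                        // e.g. 2 biallelic markers -> 2^2 = 4 haplotypes
        // Each individual is a list of possible (h1, h2) phasings, mirroring the Recovery
        // entries above: individual 0 is phase-known, individual 1 is an ambiguous double het.
        int[][][] indivs = {
                { {0, 3} },
                { {1, 2}, {0, 3} }
        };

        // Start every haplotype at the pseudocount, add 1 per phase-known copy, then normalize.
        double[] prob = new double[numPoss];
        Arrays.fill(prob, PSEUDOCOUNT);
        double total = numPoss * PSEUDOCOUNT;
        for (int[][] poss : indivs) {
            if (poss.length == 1) { prob[poss[0][0]]++; prob[poss[0][1]]++; total += 2.0; }
        }
        for (int j = 0; j < numPoss; j++) prob[j] /= total;

        // EM: weight each candidate phasing by current haplotype frequencies, re-estimate, repeat.
        for (int iter = 0; iter < 20; iter++) {
            double[] next = new double[numPoss];
            Arrays.fill(next, 1e-10);
            double nextTotal = numPoss * 1e-10;
            for (int[][] poss : indivs) {
                double norm = 0.0;
                double[] w = new double[poss.length];
                for (int k = 0; k < poss.length; k++) {
                    w[k] = prob[poss[k][0]] * prob[poss[k][1]];   // likelihood of this phasing
                    norm += w[k];
                }
                for (int k = 0; k < poss.length; k++) {
                    double p = w[k] / norm;                       // posterior weight of this phasing
                    next[poss[k][0]] += p;
                    next[poss[k][1]] += p;
                    nextTotal += 2.0 * p;
                }
            }
            for (int j = 0; j < numPoss; j++) prob[j] = next[j] / nextTotal;
        }
        System.out.println(Arrays.toString(prob));   // mass ends up on haplotypes 0 and 3
    }
}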
boolean trioPhasing = true;
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each 
block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all 
possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
if (trioPhasing) {
if (Options.getAssocTest() == 2) {
public EMReturn full_em_breakup( byte[][] input_haplos, int max_missing, int[] block_size, int dump_phased_haplos, int numTrios) throws HaploViewException{ int num_poss, iter;//, maxk, numk; double total;//, maxprob; int block, start_locus, end_locus, biggest_block_size; int poss_full;//, best, h1, h2; int num_indivs=0; boolean trioPhasing = true; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> 100)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; prob = new double[num_poss]; /* for trio option */ if (trioPhasing) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //System.out.println("made it to 110"); //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); // start prob array with probabilities from full observations for (int j=0; j<num_poss; j++) { prob[j]=PSEUDOCOUNT; } total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); prob[tempRec.h1]+=1.0; prob[tempRec.h2]+=1.0; total+=2.0; } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p = (float)(prob[tempRec.h1]*prob[tempRec.h2]); total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob for (int j=0; j<num_poss; j++) { prob[j]=1e-10; } total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); prob[tempRec.h1]+=tempRec.p; prob[tempRec.h2]+=tempRec.p; total+=(2.0*(tempRec.p)); } } // normalize for (int j=0; j<num_poss; j++) { prob[j] /= total; } iter++; } // printf("FINAL PROBABILITIES:\n"); int m=0; for (int j=0; j<num_poss; j++) { hint[j]=-1; if (prob[j] > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=j; hprob[block][m]=prob[j]; hint[j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each 
block */ poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } //TODO:System.out.println(poss_full); /* LIGATE and finish this mess :) *//* if (poss_full > 1000000) { /* what we really need to do is go through and pare back to using a smaller number (e.g., > .002, .005) //printf("too many possibilities: %d\n",poss_full); return(-5); }*/ double[] superprob = new double[poss_full]; create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ for (int j=0; j<poss_full; j++) { superprob[j]=PSEUDOCOUNT; } total=(double)poss_full; total *= PSEUDOCOUNT; //System.out.println("made it to 232"); /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { superprob[superdata[i].superposs[0].h1]+=1.0; superprob[superdata[i].superposs[0].h2]+=1.0; total+=2.0; } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p = (float) (superprob[superdata[i].superposs[k].h1]* superprob[superdata[i].superposs[k].h2]); total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ for (int j=0; j<poss_full; j++) { superprob[j]=1e-10; } total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { superprob[superdata[i].superposs[k].h1]+=superdata[i].superposs[k].p; superprob[superdata[i].superposs[k].h2]+=superdata[i].superposs[k].p; total+=(2.0*superdata[i].superposs[k].p); } } /* normalize */ for (int j=0; j<poss_full; j++) { superprob[j] /= total; } iter++; } //System.out.println("made it to 290"); /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ /* Enumeration theHaplos = haplos_present.elements(); String tempHap; while(theHaplos.hasMoreElements()) { tempHap = (String)theHaplos.nextElement(); System.out.println(tempHap); } */ double[] tempT,totalT,tempU,totalU; Vector obsT = new Vector(); Vector obsU = new Vector(); if(trioPhasing) { int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0; tempT = new double[poss_full]; totalT = new double[poss_full]; tempU = new double[poss_full]; totalU = new double[poss_full]; for (int i=0; i<numTrios*2; i+=2) { best1=0; best2=0; bestProduct=-999.999; tempnorm=0.00; for (int n=0; n<superdata[i].nsuper; n++) { for (int m=0; m<superdata[i+1].nsuper; m++) { if (kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci)) { product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p; if (product > bestProduct) { best1=n; best2=m; bestProduct=product; } if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) { tempT[superdata[i].superposs[n].h1]+=product; tempU[superdata[i].superposs[n].h2]+=product; } if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) { tempT[superdata[i+1].superposs[m].h1]+=product; tempU[superdata[i+1].superposs[m].h2]+=product; } /* normalize by all 
possibilities, even double hom */ tempnorm+=product; } } } if (tempnorm > 0.00) { for (int j=0; j<poss_full; j++) { if (tempT[j] > 0.0000 || tempU[j] > 0.0000) { totalT[j] += (tempT[j]/tempnorm); totalU[j] += (tempU[j]/tempnorm); tempT[j]=tempU[j]=0.0000; } } tempnorm=0.00; } } for (int j = 0; j <poss_full; j++){ if (superprob[j] > .001) { obsT.add(new Double(totalT[j])); obsU.add(new Double(totalU[j])); } } } EMReturn results; Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); for (int j=0; j<poss_full; j++) { if (superprob[j] > .001) { haplos_present.addElement(decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); //sprintf(haplos_present[k],"%s",decode_haplo_str(j,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(superprob[j])); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; }
tempHap = (String)theHaplos.nextElement();
System.out.println(tempHap);
}
*/
tempHap = (String)theHaplos.nextElement();
System.out.println(tempHap);
}
*/
Vector caseFreqs = new Vector();
Vector controlFreqs = new Vector();
double[] tempCase, tempControl, totalCase, totalControl;
if (Options.getAssocTest() == 1){
    tempCase = new double[poss_full];
    tempControl = new double[poss_full];
    totalCase = new double[poss_full];
    totalControl = new double[poss_full];
    double tempnorm=0;
    for (int i = numTrios*2; i < num_indivs; i++){
        for (int n=0; n<superdata[i].nsuper; n++) {
            if (((Integer)affStatus.elementAt(i)).intValue() == 1){
                tempControl[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p;
                tempControl[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p;
            }else if (((Integer)affStatus.elementAt(i)).intValue() == 2){
                tempCase[superdata[i].superposs[n].h1] += superdata[i].superposs[n].p;
                tempCase[superdata[i].superposs[n].h2] += superdata[i].superposs[n].p;
            }
            tempnorm += superdata[i].superposs[n].p;
        }
        if (tempnorm > 0.00) {
            for (int j=0; j<poss_full; j++) {
                if (tempCase[j] > 0.0000 || tempControl[j] > 0.0000) {
                    totalCase[j] += (tempCase[j]/tempnorm);
                    totalControl[j] += (tempControl[j]/tempnorm);
                    tempCase[j]=tempControl[j]=0.0000;
                }
            }
            tempnorm=0.00;
        }
    }
    for (int j = 0; j <poss_full; j++){
        if (superprob[j] > .001) {
            caseFreqs.add(new Double(totalCase[j]));
            controlFreqs.add(new Double(totalControl[j]));
        }
    }
}
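The added block turns each unrelated individual's posterior-weighted reconstructions into expected case or control haplotype counts, normalizing per individual so everyone contributes the same total weight. The helper below is a hedged restatement of that bookkeeping in isolation; the class name and parameters are hypothetical, and the affection coding (1 = control, 2 = case) is read off the branch labels in the diff.

// Illustrative helper (not Haploview code): fold one individual's weighted
// reconstructions into running case/control haplotype counts, in the spirit
// of the block added above.
class CaseControlCounts {
    /**
     * pairs[n] = {h1, h2} for reconstruction n, post[n] = its posterior weight,
     * affStatus follows the 1 = control / 2 = case coding used above.
     */
    static void addIndividual(int affStatus, int[][] pairs, double[] post,
                              double[] totalCase, double[] totalControl) {
        double[] tmpCase = new double[totalCase.length];
        double[] tmpControl = new double[totalControl.length];
        double norm = 0.0;
        for (int n = 0; n < pairs.length; n++) {
            if (affStatus == 1) {
                tmpControl[pairs[n][0]] += post[n];
                tmpControl[pairs[n][1]] += post[n];
            } else if (affStatus == 2) {
                tmpCase[pairs[n][0]] += post[n];
                tmpCase[pairs[n][1]] += post[n];
            }
            norm += post[n];
        }
        if (norm > 0.0) {   // after scaling, each person contributes two chromosomes in total
            for (int j = 0; j < totalCase.length; j++) {
                totalCase[j] += tmpCase[j] / norm;
                totalControl[j] += tmpControl[j] / norm;
            }
        }
    }
}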
if(trioPhasing)
if(Options.getAssocTest() == 2)
int best1=0,best2=0,h1,h2; double tempnorm=0,product,bestProduct=0;
double tempnorm=0,product;
best1=0;
best2=0;
bestProduct=-999.999;
tempnorm=0.00;
for (int n=0; n<superdata[i].nsuper; n++) {
    for (int m=0; m<superdata[i+1].nsuper; m++) {
        if (kid_consistent(superdata[i].superposs[n].h1,
                           superdata[i+1].superposs[m].h1,num_blocks,
                           block_size,hlist,num_hlist,i/2,num_loci)) {
            product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
            if (product > bestProduct) {
                best1=n;
                best2=m;
                bestProduct=product;
if (((Integer)affStatus.elementAt(i)).intValue() == 2){
    tempnorm=0.00;
    for (int n=0; n<superdata[i].nsuper; n++) {
        for (int m=0; m<superdata[i+1].nsuper; m++) {
            if (kid_consistent(superdata[i].superposs[n].h1,
                               superdata[i+1].superposs[m].h1,num_blocks,
                               block_size,hlist,num_hlist,i/2,num_loci)) {
                product=superdata[i].superposs[n].p*superdata[i+1].superposs[m].p;
                if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
                    tempT[superdata[i].superposs[n].h1]+=product;
                    tempU[superdata[i].superposs[n].h2]+=product;
                }
                if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
                    tempT[superdata[i+1].superposs[m].h1]+=product;
                    tempU[superdata[i+1].superposs[m].h2]+=product;
                }
                tempnorm+=product;
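This hunk swaps the best-pair bookkeeping for an affection-status check, so transmitted/untransmitted counts are only accumulated for trios whose child is affected. The snippet below is a stripped-down sketch of that tally, assuming, as the variable names tempT/tempU suggest, that h1 carries the transmitted and h2 the untransmitted parental haplotype; only heterozygous parents are informative.

// Hedged sketch of the transmission tally performed inside the loop above.
// The names and the h1-transmitted / h2-untransmitted reading are assumptions
// taken from the surrounding code, not a definitive statement of the algorithm.
class TransmissionTally {
    static void tally(int h1, int h2, double weight,
                      double[] transmitted, double[] untransmitted) {
        if (h1 != h2) {     // a homozygous parent transmits the same haplotype either way
            transmitted[h1] += weight;
            untransmitted[h2] += weight;
        }
    }
}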
if (superdata[i].superposs[n].h1 != superdata[i].superposs[n].h2) {
    tempT[superdata[i].superposs[n].h1]+=product;
    tempU[superdata[i].superposs[n].h2]+=product;
}
}
if (tempnorm > 0.00) {
    for (int j=0; j<poss_full; j++) {
        if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
            totalT[j] += (tempT[j]/tempnorm);
            totalU[j] += (tempU[j]/tempnorm);
            tempT[j]=tempU[j]=0.0000;
if (superdata[i+1].superposs[m].h1 != superdata[i+1].superposs[m].h2) {
    tempT[superdata[i+1].superposs[m].h1]+=product;
    tempU[superdata[i+1].superposs[m].h2]+=product;
}
tempnorm+=product;
}
}
if (tempnorm > 0.00) {
    for (int j=0; j<poss_full; j++) {
        if (tempT[j] > 0.0000 || tempU[j] > 0.0000) {
            totalT[j] += (tempT[j]/tempnorm);
            totalU[j] += (tempU[j]/tempnorm);
            tempT[j]=tempU[j]=0.0000;
        }
    }
    tempnorm=0.00;
tempnorm=0.00;
}
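The last two hunks adjust where the per-family totals are folded in and reset. The pattern itself, scaling one trio's temporary T/U counts by that trio's total weight before adding them to the running totals and clearing them for the next family, is restated below as a standalone sketch with illustrative names.

// Illustrative restatement of the per-family normalization used above:
// one trio's temporary counts are scaled by its total weight (tempnorm),
// folded into the running totals, then zeroed for the next family.
class PerFamilyFold {
    static void fold(double[] tempT, double[] tempU, double tempnorm,
                     double[] totalT, double[] totalU) {
        if (tempnorm > 0.0) {
            for (int j = 0; j < totalT.length; j++) {
                if (tempT[j] > 0.0 || tempU[j] > 0.0) {
                    totalT[j] += tempT[j] / tempnorm;
                    totalU[j] += tempU[j] / tempnorm;
                    tempT[j] = 0.0;
                    tempU[j] = 0.0;
                }
            }
        }
    }
}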
EMReturn results;
if (trioPhasing){ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs, obsT, obsU); }else{ results = new EMReturn((int[][])haplos_present.toArray(new int[0][0]), freqs); } return results;
this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; if (Options.getAssocTest() == 2){ this.obsT = obsT; this.obsU = obsU; } else if (Options.getAssocTest() == 1){ this.caseFreqs = caseFreqs; this.controlFreqs = controlFreqs; }
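The full_em_breakup routine above boils down to a standard EM loop over haplotype frequencies: initialize with pseudocounts, weight each individual's possible haplotype pairs by the product of the current frequencies (E step), then re-estimate and renormalize the frequencies from those weights (M step), repeated for 20 iterations per block and again over the ligated "super" haplotypes. A minimal, self-contained sketch of that update is shown below; the class and method names (EmFrequencySketch, HaplotypePair, emEstimate) are illustrative only and do not appear in the original source, which interleaves this loop with block ligation, missing-data handling, and trio phasing.

import java.util.Arrays;
import java.util.List;

class EmFrequencySketch {

    /** One possible haplotype pair for an individual, with its current posterior weight. */
    static class HaplotypePair {
        final int h1, h2;
        double p;
        HaplotypePair(int h1, int h2) { this.h1 = h1; this.h2 = h2; }
    }

    /**
     * Estimate haplotype frequencies by EM.
     * possiblePairs holds, for each individual, the haplotype pairs consistent with its genotype.
     */
    static double[] emEstimate(List<List<HaplotypePair>> possiblePairs, int numHaplos, int iterations) {
        final double PSEUDOCOUNT = 0.1;
        double[] freq = new double[numHaplos];
        Arrays.fill(freq, PSEUDOCOUNT);   // flat start from pseudocounts, as in the original code
        normalize(freq);

        for (int iter = 0; iter < iterations; iter++) {
            // E step: each pair's weight is proportional to freq[h1] * freq[h2], normalized per individual
            for (List<HaplotypePair> pairs : possiblePairs) {
                double total = 0.0;
                for (HaplotypePair hp : pairs) {
                    hp.p = freq[hp.h1] * freq[hp.h2];
                    total += hp.p;
                }
                for (HaplotypePair hp : pairs) {
                    hp.p /= total;
                }
            }
            // M step: re-estimate frequencies from the weighted pairs
            Arrays.fill(freq, 1e-10);     // small floor, as in the original code
            for (List<HaplotypePair> pairs : possiblePairs) {
                for (HaplotypePair hp : pairs) {
                    freq[hp.h1] += hp.p;
                    freq[hp.h2] += hp.p;
                }
            }
            normalize(freq);
        }
        return freq;
    }

    private static void normalize(double[] v) {
        double total = 0.0;
        for (double x : v) total += x;
        for (int i = 0; i < v.length; i++) v[i] /= total;
    }
}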
suite.addTestSuite(TestRelationship.class);
public static Test suite() { TestSuite suite = new TestSuite("Test for Architect's Swing GUI"); //$JUnit-BEGIN$ suite.addTestSuite(TestSwingUIProject.class); suite.addTestSuite(TestArchitectFrame.class); suite.addTestSuite(TestAutoLayoutAction.class); suite.addTestSuite(TestPlayPen.class); suite.addTestSuite(TestUndoManager.class); suite.addTestSuite(TestColumnEditPanel.class); suite.addTestSuite(TestSQLObjectUndoableEventAdapter.class); suite.addTestSuite(TestFruchtermanReingoldForceLayout.class); suite.addTestSuite(TestCompareDMPanel.class); suite.addTestSuite(TestTablePane.class); suite.addTestSuite(TestDeleteSelectedAction.class); //$JUnit-END$ return suite; }
public float getFStop() {
public double getFStop() {
public float getFStop() { return FStop; }
public float getFocalLength() {
public double getFocalLength() {
public float getFocalLength() { return focalLength; }
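These two hunks simply widen the getter return types from float to double. A minimal sketch of how the getters read after the change is shown below; the enclosing class name and field declarations are assumptions for illustration, since only the getter bodies and field names (FStop, focalLength) appear in the snippets above. Even if the backing fields stayed float, the getters would still compile, because float widens implicitly to double.

// Hypothetical enclosing class; only the two getters are taken from the diff above.
public class CameraInfo {
    private double FStop;         // assumed widened along with its getter
    private double focalLength;   // assumed widened along with its getter

    public double getFStop() { return FStop; }
    public double getFocalLength() { return focalLength; }
}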