rem      stringlengths  0  477k
add      stringlengths  0  313k
context  stringlengths  6  599k
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException {
public static PhotoInfo retrievePhotoInfo( int photoId ) throws PhotoNotFoundException {
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\"";
String sql = "SELECT * from photos where photo_id=\"" + photoId +"\"";
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" );
photo.FStop = rs.getDouble( "f_stop" ); photo.focalLength = rs.getDouble( "focal_length" );
public static PhotoInfo retrievePhotoInfo( String strPhotoId ) throws PhotoNotFoundException { initDB(); String sql = "SELECT * from photos where photo_id=\"" + strPhotoId +"\""; PhotoInfo photo = new PhotoInfo(); try { Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( sql ); if ( !rs.next() ) { throw new PhotoNotFoundException(); } photo.shootingPlace = rs.getString( "shooting_place" ); photo.photographer = rs.getString( "photographer" ); photo.FStop = rs.getFloat( "f_stop" ); photo.focalLength = rs.getFloat( "focal_length" ); photo.shootTime = rs.getDate( "shoot_time" ); rs.close(); stmt.close(); } catch (SQLException e ) { System.err.println( "Error fetching record: " + e.getMessage() ); // TODO: Actually this is not the right exception for this purpose throw new PhotoNotFoundException(); } return photo; }
public void setFStop(float v) {
public void setFStop(double v) {
public void setFStop(float v) { this.FStop = v; }
public void setFocalLength(float v) {
public void setFocalLength(double v) {
public void setFocalLength(float v) { this.focalLength = v; }
}else{ dpTemp.add(computeDPrime(pos1,pos2));
void generateDPrimeTable(){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dpTable = new DPrimeTable(Chromosome.getUnfilteredSize()); totalComps = (Chromosome.getUnfilteredSize()*(Chromosome.getUnfilteredSize()-1))/2; compsDone =0; //loop through all marker pairs for (int pos1 = 0; pos1 < Chromosome.getUnfilteredSize()-1; pos1++){ Vector dpTemp= new Vector(); for (int pos2 = pos1 + 1; pos2 < Chromosome.getUnfilteredSize(); pos2++){ //if the markers are too far apart don't try to compare them long sep = Chromosome.getUnfilteredMarker(pos2).getPosition() - Chromosome.getUnfilteredMarker(pos1).getPosition(); if (maxdist > 0){ if (sep <= maxdist){ dpTemp.add(computeDPrime(pos1,pos2)); } } } dpTable.addMarker(dpTemp,pos1); } }
JButton okButton = new JButton(ArchitectPanelBuilder.OK_BUTTON_LABEL);
JDefaultButton okButton = new JDefaultButton(ArchitectPanelBuilder.OK_BUTTON_LABEL);
public void showPreferencesDialog() { // XXX Can't easily use ArchitectPanelBuilder since this // contains a JTabbedPane which is not an ArchitectPanel. final JDialog d = new JDialog(af, "User Preferences"); JPanel cp = new JPanel(new BorderLayout(12,12)); JTabbedPane tp = new JTabbedPane(); cp.add(tp, BorderLayout.CENTER); cp.setBorder(BorderFactory.createEmptyBorder(12,12,12,12)); final PreferencesPanel prefPanel = new PreferencesPanel(af.getUserSettings()); tp.add("General", prefPanel); final JDBCDriverPanel jdbcPanel = new JDBCDriverPanel(af.getArchitectSession()); tp.add("JDBC Drivers", jdbcPanel); JPanel buttonPanel = new JPanel(new FlowLayout(FlowLayout.RIGHT)); JButton okButton = new JButton(ArchitectPanelBuilder.OK_BUTTON_LABEL); okButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { prefPanel.applyChanges(); jdbcPanel.applyChanges(); d.setVisible(false); } }); buttonPanel.add(okButton); Action cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { prefPanel.discardChanges(); jdbcPanel.discardChanges(); d.setVisible(false); } }; cancelAction.putValue(Action.NAME, ArchitectPanelBuilder.CANCEL_BUTTON_LABEL); JButton cancelButton = new JButton(cancelAction); buttonPanel.add(cancelButton); ArchitectPanelBuilder.makeJDialogCancellable(d, cancelAction); d.getRootPane().setDefaultButton(okButton); cp.add(buttonPanel, BorderLayout.SOUTH); d.setContentPane(cp); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
rs.close();
protected void createTypeMap() throws SQLException { if (con == null || !allowConnection) { throw new UnsupportedOperationException("Can't create a type map without DatabaseMetaData"); } typeMap = new HashMap(); DatabaseMetaData dbmd = con.getMetaData(); ResultSet rs = dbmd.getTypeInfo(); while (rs.next()) { GenericTypeDescriptor td = new GenericTypeDescriptor(rs); typeMap.put(new Integer(td.getDataType()), td); } }
Chromosome.markers = null;
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv if (st.hasMoreTokens()){ ped = st.nextToken(); indiv = st.nextToken(); }else{ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have fewer than 2 columns."); } //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 9; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 9){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); }
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /*if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } */ prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else { //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
else {
else if (infile != null){
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); Vector positions = new Vector(); Vector extras = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } /*if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } */ prevloc = loc; names.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } else { //we only sort if we read the info from an info file. if //it is from a hapmap file, then the markers were already sorted //when they were read in (in class Pedfile). int numLines = names.size(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines]; boolean needSort = false; //this loop stores the positions of each marker in an array (pos[]) in the order they appear in the file. 
//it also creates a hashtable with the positions as keys and their index in the pos[] array as the value for (int k = 0; k < (numLines); k++){ pos[k] = new Long((String)(positions.get(k))).longValue(); sortHelp.put(new Long(pos[k]),new Integer(k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(pos[k] < pos[k-1]) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort) { //sort the positions Arrays.sort(pos); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < pos.length; i++){ realPos[i] = ((Integer)(sortHelp.get(new Long(pos[i])))).intValue(); newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[pos.length]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<pos.length;i++){ tempGenotype[i] = tempChrom.getGenotype(realPos[i]); } for(int i=0;i<pos.length;i++) { tempChrom.setGenotype(tempGenotype[i],i); } } } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; a1 = a2; a2 = temp; } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0, a1, a2, (String)extras.elementAt(i))); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
tempVect.add(new Double(45)); tempVect.add(new Double(45));
tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet()));
public CheckDataPanel(File file) throws IOException{ //okay, for now we're going to assume the ped file has no header Vector pedFileStrings = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(file)); String line; while((line = reader.readLine())!=null){ pedFileStrings.add(line); } pedfile = new PedFile(); pedfile.parse(pedFileStrings); //Vector result = data.check(); Vector result = pedfile.check(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("Name"); tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] ratingArray = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(currentResult.getName());// tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(45)); tempVect.add(new Double(45));// tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping ratingArray[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } final CheckDataTableModel tableModel = new CheckDataTableModel(tableColumnNames, tableData, ratingArray); tableModel.addTableModelListener(this); table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(100); JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
if (e.getSource() == table) {
if (e.getSource() == table.getColumnsFolder()) {
public void dbObjectChanged(SQLObjectEvent e) { if (e.getSource() == table) { int[] changedIndices = e.getChangedIndices(); for (int i = 0; i < changedIndices.length; i++) { // XXX: should group contiguous regions into one event! logger.debug("Firing contentsChanged event for index "+i); fireContentsChanged(changedIndices[i], changedIndices[i]); } } else if (e.getSource() instanceof SQLColumn) { // make sure this column was actually in the table try { int index = table.getColumns().indexOf(e.getSource()); if (index >= 0) { fireContentsChanged(index, index); } } catch (ArchitectException ex) { logger.error("Exception in dbObjectChanged",ex); } } else { logger.warn("Unexpected SQLObjectEvent: "+e); } }
return table.getChildren().get(index);
return table.getColumnsFolder().getChild(index);
public Object getElementAt(int index) { try { return table.getChildren().get(index); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child "+index, ex); } }
return table.getChildCount();
return table.getColumnsFolder().getChildCount();
public int getSize() { try { return table.getChildCount(); } catch (ArchitectException ex) { throw new RuntimeException("Couldn't get child count", ex); } }
} try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE);
try { if (columnSelection.size() != this.model.getColumns().size()) { throw new IllegalStateException("out-of-sync selection list (event source="+e.getSource()+"): selection="+columnSelection+"; children="+this.model.getColumns());
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage());
public void dbChildrenRemoved(SQLObjectEvent e) { if (e.getSource() == this.model.getColumnsFolder()) { int ci[] = e.getChangedIndices(); logger.debug("Columns removed. Syncing select/highlight lists. Removed indices="+Arrays.asList(ci)); for (int i = 0; i < ci.length; i++) { columnSelection.remove(ci[i]); columnHighlight.remove(ci[i]); } if (columnSelection.size() > 0) { selectNone(); columnSelection.set(Math.min(ci[0], columnSelection.size()-1), Boolean.TRUE); } } try { ArchitectUtils.unlistenToHierarchy(this, e.getChildren()); if (columnSelection.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync selection list: selection="+columnSelection+"; children="+this.model.getColumns()); columnSelection = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnSelection.add(Boolean.FALSE); } } if (columnHighlight.size() != this.model.getColumns().size()) { logger.error("Repairing out-of-sync highlight list: highlights="+columnHighlight+"; children="+this.model.getColumns()); columnHighlight = new ArrayList(); for (int j = 0; j < model.getColumns().size(); j++) { columnHighlight.add(null); } } } catch (ArchitectException ex) { logger.error("Couldn't remove children", ex); JOptionPane.showMessageDialog(getPlayPen(), "Couldn't delete column: "+ex.getMessage()); } firePropertyChange("model.children", null, null); revalidate(); }
document = parseBody(output);
String text = getText(); if (text != null) { document = parseText(text); } else { document = parseBody(output); }
public void doTag(XMLOutput output) throws Exception { if (getVar() == null) { throw new IllegalArgumentException("The var attribute cannot be null"); } Document document = null; if (html == null) { document = parseBody(output); } else { document = parse(html); } context.setVariable(getVar(), document); }
return new PhotoCollectionTransferable(sourcePhotos);
PhotoCollection sourceCollection = view.getCollection(); return new PhotoCollectionTransferable( sourcePhotos );
protected Transferable createTransferable(JComponent c) { log.warn( "createTransferable" ); Collection selection = view.getSelection(); sourcePhotos = new PhotoInfo[selection.size()]; Iterator iter = selection.iterator(); int i = 0; while ( iter.hasNext() ) { sourcePhotos[i] = (PhotoInfo) iter.next(); i++; } log.warn( "" + i + " photos selected" );// shouldRemove = true; return new PhotoCollectionTransferable(sourcePhotos); }
PhotoCollection coll = view.getCollection(); if ( (action == MOVE) && coll instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) coll;
PhotoCollection collection = view.getCollection(); if ( (collection != lastImportTarget) && (action == MOVE) && collection instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) collection;
protected void exportDone(JComponent c, Transferable data, int action) { PhotoCollection coll = view.getCollection(); if (/*shouldRemove && */ (action == MOVE) && coll instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) coll; for ( int i = 0; i < sourcePhotos.length; i++ ) { folder.removePhoto( sourcePhotos[i] ); } } }
lastImportTarget = null;
protected void exportDone(JComponent c, Transferable data, int action) { PhotoCollection coll = view.getCollection(); if (/*shouldRemove && */ (action == MOVE) && coll instanceof PhotoFolder ) { PhotoFolder folder = (PhotoFolder) coll; for ( int i = 0; i < sourcePhotos.length; i++ ) { folder.removePhoto( sourcePhotos[i] ); } } }
lastImportTarget = folder;
public boolean importData(JComponent c, Transferable t) { log.warn( "importData" ); if (canImport(c, t.getTransferDataFlavors())) { //Don't drop on myself.// if (source == c) {// shouldRemove = false;// return true;// } PhotoCollection collection = view.getCollection(); if ( collection instanceof PhotoFolder ) { log.warn( "importing" ); // Photos were dropped to a folder so we can insert them PhotoFolder folder = (PhotoFolder) collection; try { PhotoInfo[] photos = (PhotoInfo[])t.getTransferData(photoInfoFlavor); for ( int n = 0; n < photos.length; n++ ) { folder.addPhoto( photos[n] ); } return true; } catch (UnsupportedFlavorException ufe) { log.warn("importData: unsupported data flavor"); } catch (IOException ioe) { log.warn("importData: I/O exception"); } } } return false; }
dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()];
dPrimeTable = new PairwiseLinkage[Chromosome.size()][Chromosome.size()];
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){
long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if ((sep > maxdist || sep < negMaxdist) && markersLoaded){
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[((Chromosome) chromosomes.firstElement()).size()][((Chromosome) chromosomes.firstElement()).size()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = ((SNP)markerInfo.elementAt(pos1)).getPosition() - ((SNP)markerInfo.elementAt(pos2)).getPosition(); if (sep > maxdist || sep < negMaxdist){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,-99,0,0,0,nullArray); continue; } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).elementAt(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).elementAt(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); } } }
case 0: returnVec = new FindBlocks(dPrimeTable, markerInfo).doSFS(); break;
case 0: returnVec = new FindBlocks(dPrimeTable).doSFS(); break;
void guessBlocks(int method){ Vector returnVec = new Vector(); switch(method){ case 0: returnVec = new FindBlocks(dPrimeTable, markerInfo).doSFS(); break; case 1: returnVec = new FindBlocks(dPrimeTable).do4Gamete(); break; case 2: returnVec = new FindBlocks(dPrimeTable).doMJD(); break; } blocks = returnVec; }
if (markerResults[i]){
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } 
else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
}
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } 
else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
for (int i = 0; i < numMarkers ; i++){
int count = 0; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < numMarkers; i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } Vector markerInfo = new Vector(); for (int i = 0; i < numMarkers; i++){
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } 
else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i);
byte thisAllele = ((Chromosome)chrom.elementAt(j)).unfilteredElementAt(i);
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } 
else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
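The trio branch of linkageToChrom above produces four phased chromosomes per affected child (the transmitted and untransmitted allele of each parent), with 5 marking an unresolved heterozygous site and 0 marking missing data. Below is a minimal, self-contained sketch of one fully resolved case, the father homozygous and the mother heterozygous; the allele codes are invented and the class is illustrative only, not part of the HaploView source.

// Sketch of the "dad homozygous, mom heterozygous" branch above (illustrative only, not HaploView code).
public class TrioPhaseSketch {
    public static void main(String[] args) {
        byte kid1 = 1, kid2 = 2;          // child heterozygous 1/2
        byte dad1 = 1, dad2 = 1;          // father homozygous 1/1 (mother heterozygous 1/2 is implied by the branch)
        byte dadT = dad1, dadU = dad2;    // father transmits a 1 either way
        byte momT, momU;
        if (kid1 == dad1) {               // the child's other allele must have come from mom
            momT = kid2; momU = kid1;
        } else {
            momT = kid1; momU = kid2;
        }
        // prints: dadT=1 dadU=1 momT=2 momU=1
        System.out.println("dadT=" + dadT + " dadU=" + dadU + " momT=" + momT + " momU=" + momU);
    }
}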
Chromosome.markers = markerInfo.toArray();
public void linkageToChrom(boolean[] markerResults, PedFile pedFile){ Vector indList = pedFile.getOrder(); int numMarkers = 0; Vector usedParents = new Vector(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); for(int x=0; x < indList.size(); x++){ String[] indAndFamID = (String[])indList.elementAt(x); currentFamily = pedFile.getFamily(indAndFamID[0]); currentInd = currentFamily.getMember(indAndFamID[1]); if(currentInd.getIsTyped()){ //singleton if(currentFamily.getNumMembers() == 1){ numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == thisMarker[1]){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = 5; chrom2[i] = 5; } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2)); } else{ //skip if indiv is parent in trio or unaffected if (!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0") || currentInd.getAffectedStatus() != 2)){ //trio if (!(usedParents.contains( currentInd.getFamilyID() + " " + currentInd.getMomID()) || usedParents.contains(currentInd.getFamilyID() + " " + currentInd.getDadID()))){ //add 4 phased haps provided that we haven't used this trio already numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ if (markerResults[i]){ byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else { dadTb[i] = 5; dadUb[i] = 5; } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else { momTb[i] = 5; momUb[i] = 5; } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 5; momUb[i] = 5; } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } 
else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = 5; dadUb[i] = 5; momTb[i] = 5; momUb[i] = 5; } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb)); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getDadID()); usedParents.add(currentInd.getFamilyID()+" "+currentInd.getMomID()); } } } } } double numChroms = chrom.size(); numBadGenotypes = new double[numMarkers]; percentBadGenotypes = new double[numMarkers]; for (int i = 0; i < numMarkers ; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chrom.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chrom.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } if (thisAllele == 0) { numBadGenotypes[i] ++; } } double maf = numa1 / (numa2+numa1) ; if (maf > 0.5) { maf = 1.0-maf; } markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chrom; //return chrom; }
byte thisAllele = ((Chromosome)chroms.elementAt(j)).elementAt(i);
byte thisAllele = ((Chromosome)chroms.elementAt(j)).unfilteredElementAt(i);
void prepareGenotypeInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and return a vector of Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); for (int i = 0; i < genos.length; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; //return chroms; }
Chromosome.markers = markerInfo.toArray();
void prepareGenotypeInput(File infile) throws IOException{ //this method is called to suck in data from a file (its only argument) //of genotypes and return a vector of Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); boolean firstTime = true; while ((currentLine = in.readLine()) != null){ //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) //the first time through, count number of genotypes for marker quality statistics if (firstTime){ numBadGenotypes = new double[st.countTokens()]; percentBadGenotypes = new double[st.countTokens()]; } genos = new byte[st.countTokens()]; int q = 0; while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ genos[q] = Byte.parseByte(thisGenotype); } if (genos[q] == 0) numBadGenotypes[q] ++; q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); firstTime = false; } //generate marker information in case none is subsequently available //also convert sums of bad genotypes to percentages for each marker double numChroms = chroms.size(); for (int i = 0; i < genos.length; i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chroms.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chroms.elementAt(j)).elementAt(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } chromosomes = chroms; //return chroms; }
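Both prepareGenotypeInput above and linkageToChrom estimate each marker's minor allele frequency by counting alleles over all chromosomes, with an unresolved heterozygote (code 5) adding 0.5 to each allele count and the result folded onto [0, 0.5]:

    maf = min( numa1 / (numa1 + numa2), 1 - numa1 / (numa1 + numa2) )

As a worked example with invented counts: 10 chromosomes carrying 6 copies of allele 1, 2 copies of allele 2 and 2 genotypes coded 5 give numa1 = 6 + 1 = 7 and numa2 = 2 + 1 = 3, so maf = min(0.7, 0.3) = 0.3.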
byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount);
byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).unfilteredElementAt(snpcount);
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
if (markerInfo.size() == markers.size()){ markerInfo = markers;
if (Chromosome.markers.length == markers.size()){ Chromosome.markers = markers.toArray(); markersLoaded = true;
int prepareMarkerInput(File infile) throws IOException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position String currentLine; Vector markers = new Vector(); //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; while ((currentLine = in.readLine()) != null){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).elementAt(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; StringTokenizer st = new StringTokenizer(currentLine); markers.add(new SNP(st.nextToken(), Long.parseLong(st.nextToken()), infile.getName(), maf)); snpcount ++; } if (markerInfo.size() == markers.size()){ markerInfo = markers; return 1; }else{ return -1; } }
log.debug( "javaToSql: " + src.getClass().getName() + " " + src );
log.debug( "javaToSql: " + src );
public Object javaToSql( Object src ) { log.debug( "javaToSql: " + src.getClass().getName() + " " + src ); if ( src instanceof Date ) { return new Timestamp( ((Date)src).getTime() ); } return src; }
log.debug( "sqlToJava: " + src.getClass().getName() + " " + src );
log.debug( "sqlToJava: " + src );
public Object sqlToJava( Object src ) { log.debug( "sqlToJava: " + src.getClass().getName() + " " + src ); if ( src instanceof Timestamp ) { return new Date( ((Timestamp)src).getTime() ); } return src; }
System.out.println("DBTree: got dragDropEnd event");
public void dragDropEnd(DragSourceDropEvent dsde) { System.out.println("DBTree: got dragDropEnd event"); }
System.out.println("DBTree: got dragEnter event");
public void dragEnter(DragSourceDragEvent dsde) { System.out.println("DBTree: got dragEnter event"); }
System.out.println("DBTree: got dragExit event");
public void dragExit(DragSourceEvent dse) { System.out.println("DBTree: got dragExit event"); }
System.out.println("DBTree: got dragOver event");
public void dragOver(DragSourceDragEvent dsde) { System.out.println("DBTree: got dragOver event"); }
System.out.println("DBTree: got dropActionChanged event");
public void dropActionChanged(DragSourceDragEvent dsde) { System.out.println("DBTree: got dropActionChanged event"); }
public void run(Context context, XMLOutput output) throws Exception {
public void run(JellyContext context, XMLOutput output) throws Exception {
public void run(Context context, XMLOutput output) throws Exception { output.write( text ); }
Vector result = null;
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ 
public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
result = theData.linkageToChrom(inFile, type);
theData.linkageToChrom(inFile, type);
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ 
public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
Vector result = theData.getPedFile().getResults();
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); Vector result = null; if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ result = theData.linkageToChrom(inFile, type); } if(type != HAPS && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); boolean[] markerResults = new boolean[result.size()]; for (int i = 0; i < result.size(); i++){ if (((MarkerResult)result.get(i)).getRating() > 0 && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2){ markerResults[i] = true; }else{ markerResults[i] = false; } } //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(markerResults); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ 
public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
} else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; else if (a1 >= 5){
} else if (((a1 >= 5 || b1 >= 5) && (a2 >= 5 || b2 >= 5)) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))){ doublehet++; } else if (a1 >= 5 || b1 >= 5){
public PairwiseLinkage computeDPrime(int pos1, int pos2){ int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = 
(probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
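For reference, computeDPrime above computes the standard pairwise LD statistics. Writing the EM-estimated haplotype frequencies as p_AA, p_AB, p_BA, p_BB (the probHaps array) and the allele frequencies as p_1 = p_AA + p_AB and q_1 = p_AA + p_BA (pA1 and pA2 in the code), the variables num, denom and rsq correspond to

    D = p_{AA}\,p_{BB} - p_{AB}\,p_{BA}
    D' = \frac{D}{D_{\max}}, \qquad D_{\max} = \min\bigl(p_1(1-q_1),\,(1-p_1)\,q_1\bigr) \quad\text{(the matrix flip ensures } D \ge 0\text{)}
    r^2 = \frac{D^2}{p_1(1-p_1)\,q_1(1-q_1)}

The confidence bounds are then read off a log-likelihood surface evaluated at D' = 0.00, 0.01, ..., 1.00, following Gabriel et al. (2002) as noted in the source comments.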
} else if (a2 >= 5){
} else if (a2 >= 5 || b2 >= 5){
public PairwiseLinkage computeDPrime(int pos1, int pos2){ int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bugz0r in the dec-alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = 
(probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
handlerConfig.addChild(createRemoteManagerHandlerChainConfiguration());
handlerConfig.addChild(createHandlerChainConfiguration());
public void init() { setAttribute("enabled", true); addChild(getValuedConfiguration("port", "" + m_smtpListenerPort)); DefaultConfiguration handlerConfig = new DefaultConfiguration("handler"); handlerConfig.addChild(getValuedConfiguration("helloName", "myMailServer")); handlerConfig.addChild(getValuedConfiguration("connectiontimeout", "360000")); handlerConfig.addChild(getValuedConfiguration("authorizedAddresses", m_authorizedAddresses)); handlerConfig.addChild(getValuedConfiguration("maxmessagesize", "" + 0)); handlerConfig.addChild(getValuedConfiguration("authRequired", m_authorizingMode)); handlerConfig.addChild(createRemoteManagerHandlerChainConfiguration()); addChild(handlerConfig); }
profileFunctionMap.put("DATE", new ProfileFunctionDescriptor("DATE", Types.DATE, true,true,true,false,true,true,true,true));
protected void createProfileFunctionMap() { profileFunctionMap = new HashMap(); profileFunctionMap.put("BIT", new ProfileFunctionDescriptor("BIT", Types.BIT, true,true,true,false,true,true,true,true)); profileFunctionMap.put("CHAR", new ProfileFunctionDescriptor("CHAR", Types.CHAR, true,true,true,false,true,true,true,true)); profileFunctionMap.put("BINARY", new ProfileFunctionDescriptor("BINARY", Types.BINARY, true,true,true,false,true,true,true,true)); profileFunctionMap.put("VARBINARY", new ProfileFunctionDescriptor("VARBINARY", Types.VARBINARY, true,true,true,false,true,true,true,true)); profileFunctionMap.put("RAW", new ProfileFunctionDescriptor("VARBINARY", Types.VARBINARY, true,true,true,false,true,true,true,true)); profileFunctionMap.put("VARCHAR", new ProfileFunctionDescriptor("VARCHAR", Types.VARCHAR, true,true,true,false,true,true,true,true)); profileFunctionMap.put("VARCHAR2",new ProfileFunctionDescriptor("VARCHAR", Types.VARCHAR, true,true,true,false,true,true,true,true)); profileFunctionMap.put("NVARCHAR2",new ProfileFunctionDescriptor("NVARCHAR", Types.VARCHAR, true,true,true,false,true,true,true,true)); profileFunctionMap.put("BLOB", new ProfileFunctionDescriptor("BLOB", Types.BLOB, false,false,false,false,false,false,false,false)); profileFunctionMap.put("CLOB", new ProfileFunctionDescriptor("CLOB", Types.CLOB, false,false,false,false,false,false,false,false)); profileFunctionMap.put("BIGINT", new ProfileFunctionDescriptor("BIGINT", Types.BIGINT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("DECIMAL", new ProfileFunctionDescriptor("DECIMAL", Types.DECIMAL, true,true,true,true,true,true,true,true)); profileFunctionMap.put("DOUBLE", new ProfileFunctionDescriptor("DOUBLE", Types.DOUBLE, true,true,true,true,true,true,true,true)); profileFunctionMap.put("FLOAT", new ProfileFunctionDescriptor("FLOAT", Types.FLOAT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("INTEGER", new ProfileFunctionDescriptor("INTEGER", Types.INTEGER, true,true,true,true,true,true,true,true)); profileFunctionMap.put("NUMERIC", new ProfileFunctionDescriptor("NUMERIC", Types.NUMERIC, true,true,true,true,true,true,true,true)); profileFunctionMap.put("NUMBER", new ProfileFunctionDescriptor("NUMERIC", Types.NUMERIC, true,true,true,true,true,true,true,true)); profileFunctionMap.put("REAL", new ProfileFunctionDescriptor("REAL", Types.REAL, true,true,true,true,true,true,true,true)); profileFunctionMap.put("SMALLINT", new ProfileFunctionDescriptor("SMALLINT", Types.SMALLINT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("TINYINT", new ProfileFunctionDescriptor("TINYINT", Types.TINYINT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("INTERVALDS", new ProfileFunctionDescriptor("TINYINT", Types.TINYINT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("INTERVALYM", new ProfileFunctionDescriptor("TINYINT", Types.TINYINT, true,true,true,true,true,true,true,true)); profileFunctionMap.put("TIME", new ProfileFunctionDescriptor("TIME", Types.TIME, true,true,true,false,true,true,true,true)); profileFunctionMap.put("TIMESTAMP", new ProfileFunctionDescriptor("TIMESTAMP", Types.TIMESTAMP, true,true,true,false,true,true,true,true)); profileFunctionMap.put("TIMESTAMP WITH LOCAL TIME ZONE", new ProfileFunctionDescriptor("TIMESTAMP", Types.TIMESTAMP, true,true,true,false,true,true,true,true)); profileFunctionMap.put("TIMESTAMP WITH TIME ZONE", new ProfileFunctionDescriptor("TIMESTAMP", Types.TIMESTAMP, 
true,true,true,false,true,true,true,true)); // profileFunctionMap.put("DATE", new ProfileFunctionDescriptor("DATE", Types.DATE, true,true,true,false,true,true,true,true)); profileFunctionMap.put("LONG", new ProfileFunctionDescriptor("LONGVARCHAR", Types.LONGVARCHAR, false,false,false,false,false,false,false,false)); profileFunctionMap.put("LONG RAW", new ProfileFunctionDescriptor("LONGVARBINARY", Types.LONGVARBINARY, false,false,false,false,false,false,false,false)); profileFunctionMap.put("STRUCT", new ProfileFunctionDescriptor("LONGVARBINARY", Types.LONGVARBINARY, false,false,false,false,false,false,false,false)); profileFunctionMap.put("ARRAY", new ProfileFunctionDescriptor("LONGVARBINARY", Types.LONGVARBINARY, false,false,false,false,false,false,false,false)); profileFunctionMap.put("REF", new ProfileFunctionDescriptor("LONGVARBINARY", Types.LONGVARBINARY, false,false,false,false,false,false,false,false)); }
configureTag(tag,context);
public Tag getTag(JellyContext context) throws JellyException { Tag tag = context.getTagOfTagScript(this); if ( tag == null ) { tag = createTag(); if ( tag != null ) { context.setTagForScript(this,tag); } } configureTag(tag,context); return tag; }
plot.setSectionPaint(0, Color.RED); plot.setSectionPaint(1, Color.GREEN);
private static JFreeChart createChart(final PieDataset dataset) { final JFreeChart chart = ChartFactory.createPieChart( null, // chart title dataset, // data false, // include legend false, false); // E6EEF9 chart.setBackgroundPaint(new Color(230, 238, 249)); chart.setBorderVisible(false); chart.setBorderPaint(new Color(230, 238, 249)); final PiePlot plot = (PiePlot) chart.getPlot(); plot.setLabelFont(new Font("SansSerif", Font.PLAIN, 10)); plot.setNoDataMessage("No data available"); plot.setCircular(false); plot.setLabelLinkPaint(Color.red); plot.setLabelGap(0.02); plot.setBackgroundPaint(new Color(230, 238, 249)); //plot.set return chart; }
throw(new HaploViewException("Info file error:\nToo many markers"));
throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file."));
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
throw(new HaploViewException("Info file error:\nNot enough markers"));
throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file."));
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, Long.parseLong(loc), infile.getName(), Math.rint(maf*100.0)/100.0)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } filteredDPrimeTable = getFilteredTable(); } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
NumberFormat nf = NumberFormat.getInstance();
NumberFormat nf = NumberFormat.getInstance(Locale.US);
public void saveHapsToText(Haplotype[][] finishedHaplos, double[] multidprime, File saveHapsFile) throws IOException{ if (finishedHaplos == null) return; NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); boolean[] tags = finishedHaplos[i][0].getTags(); for (int j = 0; j < markerNums.length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); if (tags[j]) saveHapsWriter.write("!"); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int[] theGeno = finishedHaplos[i][j].getGeno(); StringBuffer theHap = new StringBuffer(theGeno.length); for (int k = 0; k < theGeno.length; k++){ theHap.append(theGeno[k]); } saveHapsWriter.write(theHap.toString() + " (" + nf.format(finishedHaplos[i][j].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][j].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } if (i < finishedHaplos.length - 1){ saveHapsWriter.write("Multiallelic Dprime: " + multidprime[i] + "\n"); }else{ saveHapsWriter.write("\n"); } } saveHapsWriter.close(); }
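Note on the getInstance change above: NumberFormat.getInstance() uses the JVM's default locale, so the haplotype frequencies written by saveHapsToText could come out with a comma decimal separator on some machines; NumberFormat.getInstance(Locale.US) pins the format. A minimal sketch of the difference (values are arbitrary):

import java.text.NumberFormat;
import java.util.Locale;

public class LocaleFormatDemo {
    public static void main(String[] args) {
        NumberFormat german = NumberFormat.getInstance(Locale.GERMANY);
        NumberFormat us = NumberFormat.getInstance(Locale.US);
        german.setMinimumFractionDigits(3);
        german.setMaximumFractionDigits(3);
        us.setMinimumFractionDigits(3);
        us.setMaximumFractionDigits(3);
        // prints "0,325" under the German locale but "0.325" under Locale.US,
        // so downstream parsers that expect '.' only work reliably with the pinned locale
        System.out.println(german.format(0.325));
        System.out.println(us.format(0.325));
    }
}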
table.getColumnModel().getColumn(1).setPreferredWidth(100);
table.getColumnModel().getColumn(1).setMinWidth(100);
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getUnfilteredMarker(i).getName()); tempVect.add(new Long(Chromosome.getUnfilteredMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); if (disp){ table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); } }
tableScroller.setMaximumSize(new Dimension(600, tableScroller.getPreferredSize().height));
public CheckDataPanel(HaploData hd, boolean disp) throws IOException, PedFileException{ STATUS_COL = 8; setLayout(new BoxLayout(this,BoxLayout.Y_AXIS)); pedfile = hd.getPedFile(); theData = hd; Vector result = pedfile.getResults(); int numResults = result.size(); Vector tableColumnNames = new Vector(); tableColumnNames.add("#"); if (theData.infoKnown){ tableColumnNames.add("Name"); tableColumnNames.add("Position"); STATUS_COL += 2; } tableColumnNames.add("ObsHET"); tableColumnNames.add("PredHET"); tableColumnNames.add("HWpval"); tableColumnNames.add("%Geno"); tableColumnNames.add("FamTrio"); tableColumnNames.add("MendErr"); tableColumnNames.add("MAF"); tableColumnNames.add("Rating"); Vector tableData = new Vector(); int[] markerRatings = new int[numResults]; for (int i = 0; i < numResults; i++){ Vector tempVect = new Vector(); MarkerResult currentResult = (MarkerResult)result.get(i); tempVect.add(new Integer(i+1)); if (theData.infoKnown){ tempVect.add(Chromosome.getUnfilteredMarker(i).getName()); tempVect.add(new Long(Chromosome.getUnfilteredMarker(i).getPosition())); } tempVect.add(new Double(currentResult.getObsHet())); tempVect.add(new Double(currentResult.getPredHet())); tempVect.add(new Double(currentResult.getHWpvalue())); tempVect.add(new Double(currentResult.getGenoPercent())); tempVect.add(new Integer(currentResult.getFamTrioNum())); tempVect.add(new Integer(currentResult.getMendErrNum())); tempVect.add(new Double(currentResult.getMAF())); if (currentResult.getRating() > 0){ tempVect.add(new Boolean(true)); }else{ tempVect.add(new Boolean(false)); } //this value is never displayed, just kept for bookkeeping markerRatings[i] = currentResult.getRating(); tableData.add(tempVect.clone()); } tableModel = new CheckDataTableModel(tableColumnNames, tableData, markerRatings); tableModel.addTableModelListener(this); if (disp){ table = new JTable(tableModel); final CheckDataCellRenderer renderer = new CheckDataCellRenderer(); try{ table.setDefaultRenderer(Class.forName("java.lang.Double"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Integer"), renderer); table.setDefaultRenderer(Class.forName("java.lang.Long"), renderer); }catch (Exception e){ } table.getColumnModel().getColumn(0).setPreferredWidth(30); if (theData.infoKnown){ table.getColumnModel().getColumn(1).setPreferredWidth(100); } JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); } }
log.info("Registering tag library to: " + namespaceURI + " taglib: " + taglib); taglibs.put(namespaceURI, taglib); }
if (log.isDebugEnabled()) { log.debug("Registering tag library to: " + namespaceURI + " taglib: " + taglib); } taglibs.put(namespaceURI, taglib); }
public void registerTagLibrary(String namespaceURI, TagLibrary taglib) { log.info("Registering tag library to: " + namespaceURI + " taglib: " + taglib); taglibs.put(namespaceURI, taglib); }
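Note: the replacement wraps the message in an isDebugEnabled() check so the string concatenation is skipped whenever debug output is off. The record does not show which logging implementation backs the log field, so this sketch of the same guard pattern uses java.util.logging instead:

import java.util.logging.Level;
import java.util.logging.Logger;

public class GuardedLoggingDemo {
    private static final Logger logger = Logger.getLogger(GuardedLoggingDemo.class.getName());

    static void register(String namespaceURI, Object taglib) {
        // build the message only when the level is actually enabled,
        // so disabled logging costs no string concatenation
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Registering tag library to: " + namespaceURI + " taglib: " + taglib);
        }
        // ... store the taglib in a map here ...
    }

    public static void main(String[] args) {
        register("jelly:core", new Object());
    }
}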
for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; }
public void addBlock(int firstMarker, int lastMarker) { if (firstMarker < 0){ firstMarker = 0; } if (lastMarker >= Chromosome.realIndex.length){ lastMarker = Chromosome.realIndex.length-1; } if (lastMarker - firstMarker < 1){ return; } int inArray[] = new int[lastMarker-firstMarker+1]; for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; } blocksChanged = true; if (blocks.size() != 0){ boolean placed = false; for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); //trim out any blocks that are overlapped if ((lastMarker >= currentBlock[0] && firstMarker <= currentBlock[currentBlock.length-1]) || firstMarker <= currentBlock[currentBlock.length-1] && firstMarker >= currentBlock[0]){ blocks.removeElementAt(i); i--; } } for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); if (firstMarker <= currentBlock[0] && !placed){ blocks.insertElementAt(inArray,i); placed = true; } } if (!placed){ blocks.add(inArray); } }else{ blocks.add(inArray); } }
for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; }
public void addBlock(int firstMarker, int lastMarker) { if (firstMarker < 0){ firstMarker = 0; } if (lastMarker >= Chromosome.realIndex.length){ lastMarker = Chromosome.realIndex.length-1; } if (lastMarker - firstMarker < 1){ return; } int inArray[] = new int[lastMarker-firstMarker+1]; for (int i = 0; i < inArray.length; i++){ inArray[i] = firstMarker+i; this.isInBlock[firstMarker+i] = true; } blocksChanged = true; if (blocks.size() != 0){ boolean placed = false; for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); //trim out any blocks that are overlapped if ((lastMarker >= currentBlock[0] && firstMarker <= currentBlock[currentBlock.length-1]) || firstMarker <= currentBlock[currentBlock.length-1] && firstMarker >= currentBlock[0]){ blocks.removeElementAt(i); i--; } } for (int i = 0; i < blocks.size(); i++){ int currentBlock[] = (int[])blocks.elementAt(i); if (firstMarker <= currentBlock[0] && !placed){ blocks.insertElementAt(inArray,i); placed = true; } } if (!placed){ blocks.add(inArray); } }else{ blocks.add(inArray); } }
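Note: the trimming loop in addBlock removes any existing block whose marker range overlaps the newly defined one. The condition reduces to the standard closed-interval overlap test, shown in isolation below (helper and class names are made up for illustration):

public class OverlapSketch {
    // Two closed ranges [aFirst, aLast] and [bFirst, bLast] overlap
    // exactly when each one starts no later than the other ends.
    static boolean overlaps(int aFirst, int aLast, int bFirst, int bLast) {
        return aFirst <= bLast && bFirst <= aLast;
    }

    public static void main(String[] args) {
        System.out.println(overlaps(2, 5, 4, 9));   // true  - markers 4 and 5 are shared
        System.out.println(overlaps(2, 5, 6, 9));   // false - the ranges are disjoint
    }
}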
StringBuffer name = new StringBuffer(); name.append("Project");
public Component getTreeCellRendererComponent(JTree tree, Object value, boolean sel, boolean expanded, boolean leaf, int row, boolean hasFocus) { setText(value.toString()); if (value instanceof SQLDatabase) { logger.debug("project: " + ArchitectFrame.getMainInstance().getProject()); if (ArchitectFrame.getMainInstance().getProject() == null || // getProject() is null when program is starting ArchitectFrame.getMainInstance().getProject().getTargetDatabase() == value) { setIcon(targetIcon); } else { setIcon(dbIcon); } } else if (value instanceof SQLCatalog) { if (((SQLCatalog) value).getNativeTerm().equals("owner")) { setIcon(ownerIcon); } else if (((SQLCatalog) value).getNativeTerm().equals("database")) { setIcon(dbIcon); } else if (((SQLCatalog) value).getNativeTerm().equals("schema")) { setIcon(schemaIcon); } else { setIcon(cataIcon); } } else if (value instanceof SQLSchema) { if (((SQLSchema) value).getNativeTerm().equals("owner")) { setIcon(ownerIcon); } else { setIcon(schemaIcon); } } else if (value instanceof SQLTable) { setIcon(tableIcon); if (((SQLTable) value).getObjectType() != null) { setText(((SQLTable) value).getName()+" ("+((SQLTable) value).getObjectType()+")"); } else { setText(((SQLTable) value).getName()); } } else if (value instanceof SQLRelationship) { setIcon(keyIcon); } else { setIcon(null); } this.selected = sel; this.hasFocus = hasFocus; if (value instanceof SQLObject) { if (((SQLObject) value).isPopulated()) { setForeground(Color.black); } else { setForeground(Color.lightGray); } } return this; }
Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; }
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult);
results.add(checkMarker(i));
public Vector check() throws PedFileException{ Vector results = new Vector(); //_size = _pedFile.getNumIndividuals(); int numOfMarkers = _pedFile.getNumMarkers(); //not worrying about names right now //TODO: store and use marker names //Vector names = this._pedFile.getMarkerNames(); Vector names = null; boolean withName=false; if(names!=null && names.size()==numOfMarkers) { withName = true; } for(int i= 0; i < numOfMarkers; i++){ MarkerResult markerResult; if(withName) { markerResult = checkMarker(i, (String)names.get(i)); }else{ markerResult = checkMarker(i, new String("Marker " + (i+1))); } results.add(markerResult); } return results; }
private MarkerResult checkMarker(int loc, String name)throws PedFileException{
private MarkerResult checkMarker(int loc)throws PedFileException{
private MarkerResult checkMarker(int loc, String name)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = 
currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setName(name); return result; }
result.setName(name);
private MarkerResult checkMarker(int loc, String name)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; //int indivgeno=0, int missing=0, parenthet=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0; //Hashtable allgenos = new Hashtable(); Hashtable numindivs=new Hashtable(); Hashtable parentgeno = new Hashtable(); Hashtable kidgeno = new Hashtable(); Hashtable parenthom = new Hashtable(); Hashtable count = new Hashtable(); String allele1_string, allele2_string; //loop through each family, check data for marker loc Enumeration famList = _pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = _pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers = currentInd.getMarker(loc); allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if(!(currentInd.getMomID().equals("0") || currentInd.getDadID().equals("0"))){ //do mendel check //byte[] marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getMomID())).getMarker(loc); byte[] marker = (currentFamily.getMember(currentInd.getMomID())).getMarker(loc); int momAllele1 = marker[0]; int momAllele2 = marker[1]; //marker = ((Individual)pedFileHash.get(familyID + " " + currentInd.getDadID())).getMarker(loc); marker = (currentFamily.getMember(currentInd.getDadID())).getMarker(loc); int dadAllele1 = marker[0]; int dadAllele2 = marker[1]; //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } //end mendel check } } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = 
currentFamily.getMember((String)indList.nextElement()); if (currentInd.getIsTyped()){ byte[] markers; byte[] zeroArray = {0,0}; if (currentInd.getZeroed(loc)){ markers = zeroArray; }else{ markers = currentInd.getMarker(loc); } allele1 = markers[0]; allele1_string = Integer.toString(allele1); allele2 = markers[1]; allele2_string = Integer.toString(allele2); String familyID = currentInd.getFamilyID(); incOrSetOne(numindivs,familyID); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has parents if(currentInd.getMomID().compareTo(Individual.DATA_MISSING)==0 && currentInd.getDadID().compareTo(Individual.DATA_MISSING)==0){ //$parentgeno{$ped}++ //set parentgeno incOrSetOne(parentgeno,familyID); if(allele1 != allele2) { parenthet++; } else{ incOrSetOne(parenthom,allele1_string); } } else{//$kidgeno{$ped}++ incOrSetOne(kidgeno,familyID); } if(allele1 == allele2) { hom++; } else { het++; } //count number of allele incOrSetOne(count,allele1_string); incOrSetOne(count,allele2_string); } //missing data else missing++; } } } double obsHET = getObsHET(het, hom); double[] freqStuff = getFreqStuff(count); double preHET = freqStuff[0]; double maf = freqStuff[1]; //HW p value double pvalue = getPValue(parenthom, parenthet); //geno percent double genopct = getGenoPercent(het, hom, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(numindivs, parentgeno, kidgeno); //rating int rating = this.getRating(genopct, pvalue, obsHET, mendErrNum,maf); result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setName(name); return result; }
instance.updateDB();
public void addInstance( Volume volume, File instanceFile, int instanceType ) { ArrayList origInstances = getInstances(); ImageInstance instance = ImageInstance.create( volume, instanceFile, this ); instance.setInstanceType( instanceType ); origInstances.add( instance ); }
request.setAttribute(RequestAttributes.NAV_CURRENT_PAGE, "Query");
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { MBeanQueryForm queryForm = (MBeanQueryForm)actionForm; final String queryObjectName = queryForm.getObjectName(); MBeanService mbeanService = ServiceFactory.getMBeanService(); List mbeanDataList = mbeanService.queryMBeans(Utils.getServiceContext(context), queryObjectName); Map domainToObjectNameListMap = new TreeMap(); ObjectNameTuple tuple = new ObjectNameTuple(); for(Iterator it=mbeanDataList.iterator(); it.hasNext();){ MBeanData mbeanData = (MBeanData)it.next(); tuple.setObjectName(mbeanData.getName()); String domain = tuple.getDomain(); String name = tuple.getName(); Set objectNameList = (Set)domainToObjectNameListMap.get(domain); if(objectNameList == null){ objectNameList = new TreeSet(); domainToObjectNameListMap.put(domain, objectNameList); } objectNameList.add(name); } request.setAttribute("domainToObjectNameListMap", domainToObjectNameListMap); return mapping.findForward(Forwards.SUCCESS); }
rs.close();
public void findConflicting() throws SQLException { conflicts = new ArrayList(); if (logger.isDebugEnabled()) { logger.debug("About to find conflicting objects for DDL Script: "+ddlStatements); } Iterator it = ddlStatements.iterator(); while (it.hasNext()) { DDLStatement ddlStmt = (DDLStatement) it.next(); if (ddlStmt.getType() != DDLStatement.StatementType.CREATE) continue; SQLObject so = ddlStmt.getObject(); Class clazz = so.getClass(); if (clazz.equals(SQLTable.class)) { SQLTable t = (SQLTable) so; String cat = ddlStmt.getTargetCatalog(); String sch = ddlStmt.getTargetSchema(); if (logger.isDebugEnabled()) { logger.debug("Finding conflicts for TABLE '" + cat + "'.'" + sch + "'.'" + t.getName() + "'"); } ResultSet rs = dbmd.getTables(cat, sch, t.getName(), null); while (rs.next()) { Conflict c = new Conflict( rs.getString("TABLE_TYPE"), rs.getString("TABLE_CAT"), rs.getString("TABLE_SCHEM"), rs.getString("TABLE_NAME")); c.setSqlDropStatement("DROP TABLE "+c.getQualifiedName()); List dependants = new ArrayList(); c.addTableDependants(); conflicts.add(c); } } else if (clazz.equals(SQLRelationship.class)) { logger.error("Relationship conflicts are not supported yet!"); } else { throw new IllegalArgumentException( "Unknown subclass of SQLObject: " + clazz.getName()); } } if (logger.isDebugEnabled()) { logger.debug("Found conflicts: " + conflicts); } }
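Note: the added rs.close() releases the cursor returned by DatabaseMetaData.getTables(), which findConflicting otherwise leaks on every conflict scan. Since this code predates try-with-resources, a finally block is the usual way to guarantee the close; a sketch under the assumption that an open Connection is already available:

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ResultSetCleanupSketch {
    static void listTables(Connection con) throws SQLException {
        DatabaseMetaData dbmd = con.getMetaData();
        ResultSet rs = dbmd.getTables(null, null, "%", null);
        try {
            while (rs.next()) {
                System.out.println(rs.getString("TABLE_NAME"));
            }
        } finally {
            rs.close();   // always release the cursor, even if iteration throws
        }
    }
}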
timer = new javax.swing.Timer(500, new ActionListener(){
timer = new javax.swing.Timer(50, new ActionListener(){
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(500, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
if (theData.getComplete() == theData.getToBeCompleted()){
if (theData.finished){
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == "Open"){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ theData = new HaploData(fc.getSelectedFile()); infileName = fc.getSelectedFile().getName(); //compute D primes and monitor progress progressMonitor = new ProgressMonitor(this, "Computing " + theData.getToBeCompleted() + " values of D prime","", 0, theData.getToBeCompleted()); progressMonitor.setProgress(0); progressMonitor.setMillisToDecideToPopup(2000); final SwingWorker worker = new SwingWorker(){ public Object construct(){ theData.doMonitoredComputation(); return ""; } }; timer = new javax.swing.Timer(500, new ActionListener(){ public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } } }); worker.start(); timer.start(); }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } } else if (command == loadInfoStr){ int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { try{ int good = theData.prepareMarkerInput(fc.getSelectedFile()); if (good == -1){ JOptionPane.showMessageDialog(this, "Number of markers in info file does not match number of markers in dataset.", "Error", JOptionPane.ERROR_MESSAGE); }else{ infoKnown=true; // loadInfoMenuItem.setEnabled(false); drawPicture(theData); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } }else if (command == "Clear All Blocks"){ theBlocks.clearBlocks(); }else if (command == "Define Blocks"){ defineBlocks(); }else if (command == "Customize Haplotype Output"){ customizeHaps(); }else if (command == "Tutorial"){ showHelp(); }else if (command == "Export LD Picture to JPG"){ doExportDPrime(); }else if (command == "Dump LD Output to Text"){ saveDprimeToText(); }else if (command == "Save Haplotypes to Text"){ saveHapsToText(); }else if (command == "Save Haplotypes to JPG"){ saveHapsPic(); }else if (command == "Generate Haplotypes"){ try{ drawHaplos(theData.generateHaplotypes(theData.blocks, haploThresh)); saveHapsMenuItem.setEnabled(true); saveHapsPicMenuItem.setEnabled(true); }catch (IOException ioe){} } else if (command == "Exit"){ System.exit(0); } }
if (theData.getComplete() == theData.getToBeCompleted()){
if (theData.finished){
public void actionPerformed(ActionEvent evt){ progressMonitor.setProgress(theData.getComplete()); if (theData.getComplete() == theData.getToBeCompleted()){ timer.stop(); progressMonitor.close(); infoKnown=false; drawPicture(theData); loadInfoMenuItem.setEnabled(true); hapMenuItem.setEnabled(true); customizeHapsMenuItem.setEnabled(true); exportMenuItem.setEnabled(true); saveDprimeMenuItem.setEnabled(true); clearBlocksMenuItem.setEnabled(true); guessBlocksMenuItem.setEnabled(true); } }
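Note: the records above poll a long-running computation from a javax.swing.Timer and close the ProgressMonitor once a completion flag (or count) says the work is done. A self-contained sketch of that polling pattern; the background worker here just counts, and all names are illustrative:

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.ProgressMonitor;
import javax.swing.Timer;

public class ProgressPollingDemo {
    static volatile int complete = 0;
    static final int TOTAL = 100;

    public static void main(String[] args) {
        final ProgressMonitor monitor = new ProgressMonitor(null, "Working", "", 0, TOTAL);
        new Thread(new Runnable() {              // background work off the EDT
            public void run() {
                while (complete < TOTAL) {
                    complete++;
                    try { Thread.sleep(20); } catch (InterruptedException e) { return; }
                }
            }
        }).start();
        final Timer timer = new Timer(50, null); // poll every 50 ms on the EDT
        timer.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                monitor.setProgress(complete);
                if (complete >= TOTAL) {
                    timer.stop();
                    monitor.close();
                    System.exit(0);              // demo only: shut the EDT down
                }
            }
        });
        timer.start();
    }
}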
output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");");
output.append(refreshInterval + ", " + appId + ",''dummy'',''dummy'')\", " + refreshInterval + ");");
public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); output.append(component.draw(context)); // append script final String dashboardId = context.getDashboardConfig().getDashboardId(); String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(dashboardId); output.append("'', ''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
pp.selectNone();
public void actionPerformed(ActionEvent evt) { logger.debug("the hashcode is: " + super.hashCode()); pkTable = null; fkTable = null; logger.debug("Starting to create relationship!"); active = true; pp.setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR)); }
if (freqs[i] > FindBlocks.fourGameteCutoff) numGam++;
if (freqs[i] > FindBlocks.fourGameteCutoff + 1E-8) numGam++;
public void colorDPrime(int scheme){ PairwiseLinkage dPrime[][] = theData.filteredDPrimeTable; if (scheme == STD_SCHEME){ // set coloring based on LOD and D' for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double d = thisPair.getDPrime(); double l = thisPair.getLOD(); Color boxColor = null; if (l > 2) { if (d < 0.5) { //high LOD, low D' boxColor = new Color(255, 224, 224); } else { //high LOD, high D' shades of red double blgr = (255-32)*2*(1-d); boxColor = new Color(255, (int) blgr, (int) blgr); //boxColor = new Color(224, (int) blgr, (int) blgr); } } else if (d > 0.99) { //high D', low LOD blueish color boxColor = new Color(192, 192, 240); } else { //no LD boxColor = Color.white; } thisPair.setColor(boxColor); } } }else if (scheme == SFS_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); //color in squares if (lowCI >= FindBlocks.cutLowCI && highCI >= FindBlocks.cutHighCI) { thisPair.setColor(Color.darkGray); //strong LD }else if (highCI >= FindBlocks.recHighCI) { thisPair.setColor(Color.lightGray); //uninformative } else { thisPair.setColor(Color.white); //recomb } } } }else if (scheme == GAM_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null) { continue; } double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ if (freqs[i] > FindBlocks.fourGameteCutoff) numGam++; } //color in squares if(numGam > 3){ thisPair.setColor(Color.white); }else{ thisPair.setColor(Color.darkGray); } } } } }
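Note on the cutoff change: adding 1E-8 keeps frequencies that only exceed fourGameteCutoff through floating-point rounding from being counted as a fourth gamete. A small demonstration of why a strict > against the raw cutoff misfires (the cutoff value is arbitrary here):

public class EpsilonCutoffDemo {
    public static void main(String[] args) {
        double cutoff = 0.3;
        double freq = 0.1 + 0.2;                    // mathematically equal to the cutoff
        System.out.println(freq > cutoff);          // true  - 0.30000000000000004 > 0.3
        System.out.println(freq > cutoff + 1E-8);   // false - the tolerance absorbs the rounding error
    }
}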
refresh(0);
refresh();
public void mouseClicked(MouseEvent e) { if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK) { int clickX = e.getX(); int clickY = e.getY(); if (showWM && wmInteriorRect.contains(clickX,clickY)){ //convert a click on the worldmap to a point on the big picture int bigClickX = (((clickX - getVisibleRect().x - (worldmap.getWidth()-wmInteriorRect.width)/2) * chartSize.width) / wmInteriorRect.width)-getVisibleRect().width/2; int bigClickY = (((clickY - getVisibleRect().y - (worldmap.getHeight() - wmInteriorRect.height)/2 - (getVisibleRect().height-worldmap.getHeight())) * chartSize.height) / wmInteriorRect.height) - getVisibleRect().height/2 + infoHeight; //System.out.println(chartSize.height); //if the clicks are near the edges, correct values if (bigClickX > chartSize.width - getVisibleRect().width){ bigClickX = chartSize.width - getVisibleRect().width; } if (bigClickX < 0){ bigClickX = 0; } if (bigClickY > chartSize.height - getVisibleRect().height + infoHeight){ bigClickY = chartSize.height - getVisibleRect().height + infoHeight; } if (bigClickY < 0){ bigClickY = 0; } ((JViewport)getParent()).setViewPosition(new Point(bigClickX,bigClickY)); }else{ Rectangle blockselector = new Rectangle(clickXShift-boxRadius,clickYShift - boxRadius, (Chromosome.getFilteredSize()*boxSize), boxSize); if(blockselector.contains(clickX,clickY)){ int whichMarker = (int)(0.5 + (double)((clickX - clickXShift))/boxSize); if (theData.isInBlock[whichMarker]){ theData.removeFromBlock(whichMarker); refresh(0); } else if (whichMarker > 0 && whichMarker < Chromosome.realIndex.length){ theData.addMarkerIntoSurroundingBlock(whichMarker); } } } } }
refresh(0);
refresh();
public void mouseReleased(MouseEvent e) { //remove popped up window if ((e.getModifiers() & InputEvent.BUTTON3_MASK) == InputEvent.BUTTON3_MASK){ popupExists = false; repaint(); //resize window once user has ceased dragging } else if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK){ if (getCursor() == Cursor.getPredefinedCursor(Cursor.NE_RESIZE_CURSOR)){ resizeRectExists = false; noImage = true; if (resizeWMRect.width > 20){ wmMaxWidth = resizeWMRect.width; } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); repaint(); } if (getCursor() == Cursor.getPredefinedCursor(Cursor.E_RESIZE_CURSOR)){ setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); blockRectExists = false; int firstMarker = (int)(0.5 + (double)((blockStartX - clickXShift))/boxSize); int lastMarker = (int)(0.5 + (double)((e.getX() - clickXShift))/boxSize); if (firstMarker > lastMarker){ int temp = firstMarker; firstMarker = lastMarker; lastMarker = temp; } theData.addBlock(firstMarker, lastMarker); refresh(0); } } }
public void refresh(int scheme){ if (scheme != 0){ colorDPrime(scheme); }
public void refresh(){
public void refresh(int scheme){ if (scheme != 0){ colorDPrime(scheme); } noImage = true; repaint(); }
for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } }
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ 
endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. /* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue());
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ 
endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. /* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue());
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ 
endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. /* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0;
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ 
endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. /* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
multilocusTable[i] = crossPercentages; } int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00;
Haplotype[][] generateCrossovers(Haplotype[][] haplos) throws HaploViewException{ Vector crossBlock = new Vector(); double CROSSOVER_THRESHOLD = 0.001; //to what percentage do we want to consider crossings? if (haplos.length == 0) return null; //seed first block with ordering numbers for (int u = 0; u < haplos[0].length; u++){ haplos[0][u].setListOrder(u); } for (int i = 0; i < haplos.length; i++){ haplos[i][0].clearTags(); } multidprimeArray = new double[haplos.length]; //get "tag" SNPS if there is only one block: if (haplos.length==1){ Vector theBestSubset = getBestSubset(haplos[0]); for (int i = 0; i < theBestSubset.size(); i++){ haplos[0][0].addTag(((Integer)theBestSubset.elementAt(i)).intValue()); } } for (int gap = 0; gap < haplos.length - 1; gap++){ //compute crossovers for each inter-block gap Vector preGapSubset = getBestSubset(haplos[gap]); Vector postGapSubset = getBestSubset(haplos[gap+1]); int[] preMarkerID = haplos[gap][0].getMarkers(); //index haplos to markers in whole dataset int[] postMarkerID = haplos[gap+1][0].getMarkers(); crossBlock.clear(); //make a "block" of the markers which id the pre- and post- gap haps for (int i = 0; i < preGapSubset.size(); i++){ crossBlock.add(new Integer(preMarkerID[((Integer)preGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap][0].addTag(((Integer)preGapSubset.elementAt(i)).intValue()); } for (int i = 0; i < postGapSubset.size(); i++){ crossBlock.add(new Integer(postMarkerID[((Integer)postGapSubset.elementAt(i)).intValue()])); //mark tags haplos[gap+1][0].addTag(((Integer)postGapSubset.elementAt(i)).intValue()); } Vector inputVector = new Vector(); int[] intArray = new int[crossBlock.size()]; for (int i = 0; i < crossBlock.size(); i++){ //input format for hap generating routine intArray[i] = ((Integer)crossBlock.elementAt(i)).intValue(); } inputVector.add(intArray); Haplotype[] crossHaplos = generateHaplotypes(inputVector,true)[0]; //get haplos of gap double[][] multilocusTable = new double[haplos[gap].length][]; double[] rowSum = new double[haplos[gap].length]; double[] colSum = new double[haplos[gap+1].length]; double multilocusTotal = 0; for (int i = 0; i < haplos[gap].length; i++){ double[] crossPercentages = new double[haplos[gap+1].length]; StringBuffer firstHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string out of uniquely identifying genotypes for this hap firstHapCodeB.append(haplos[gap][i].getGeno()[((Integer)preGapSubset.elementAt(j)).intValue()]); } String firstHapCode = firstHapCodeB.toString(); for (int gapHaplo = 0; gapHaplo < crossHaplos.length; gapHaplo++){ //look at each crossover hap if (crossHaplos[gapHaplo].getPercentage() > CROSSOVER_THRESHOLD){ StringBuffer gapBeginHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = 0; j < preGapSubset.size(); j++){ //make a string as above gapBeginHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapBeginHapCode = gapBeginHapCodeB.toString(); if (gapBeginHapCode.equals(firstHapCode)){ //if this crossover hap corresponds to this pregap hap StringBuffer gapEndHapCodeB = new StringBuffer(preGapSubset.size()); for (int j = preGapSubset.size(); j < crossHaplos[gapHaplo].getGeno().length; j++){ gapEndHapCodeB.append(crossHaplos[gapHaplo].getGeno()[j]); } String gapEndHapCode = gapEndHapCodeB.toString(); for (int j = 0; j < haplos[gap+1].length; j++){ StringBuffer endHapCodeB = new StringBuffer(); for (int k = 0; k < postGapSubset.size(); k++){ 
endHapCodeB.append(haplos[gap+1][j].getGeno()[((Integer)postGapSubset.elementAt(k)).intValue()]); } String endHapCode = endHapCodeB.toString(); if (gapEndHapCode.equals(endHapCode)){ crossPercentages[j] = crossHaplos[gapHaplo].getPercentage(); } } } } } //thought i needed to fix these percentages, but the raw values are just as good. /* double percentageSum = 0; double[] fixedCross = new double[crossPercentages.length]; for (int y = 0; y < crossPercentages.length; y++){ percentageSum += crossPercentages[y]; } for (int y = 0; y < crossPercentages.length; y++){ fixedCross[y] = crossPercentages[y]/percentageSum; }*/ haplos[gap][i].addCrossovers(crossPercentages); multilocusTable[i] = crossPercentages; } //sort based on "straight line" crossings int hilimit; int lolimit; if (haplos[gap+1].length > haplos[gap].length) { hilimit = haplos[gap+1].length; lolimit = haplos[gap].length; }else{ hilimit = haplos[gap].length; lolimit = haplos[gap+1].length; } boolean[] unavailable = new boolean[hilimit]; int[] prevBlockLocs = new int[haplos[gap].length]; for (int q = 0; q < prevBlockLocs.length; q++){ prevBlockLocs[haplos[gap][q].getListOrder()] = q; } for (int u = 0; u < haplos[gap+1].length; u++){ double currentBestVal = 0; int currentBestLoc = -1; for (int v = 0; v < lolimit; v++){ if (!(unavailable[v])){ if (haplos[gap][prevBlockLocs[v]].getCrossover(u) >= currentBestVal) { currentBestLoc = haplos[gap][prevBlockLocs[v]].getListOrder(); currentBestVal = haplos[gap][prevBlockLocs[v]].getCrossover(u); } } } //it didn't get lined up with any of the previous block's markers //put it at the end of the list if (currentBestLoc == -1){ for (int v = 0; v < unavailable.length; v++){ if (!(unavailable[v])){ currentBestLoc = v; break; } } } haplos[gap+1][u].setListOrder(currentBestLoc); unavailable[currentBestLoc] = true; } //compute multilocus D' for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ rowSum[i] += multilocusTable[i][j]; colSum[j] += multilocusTable[i][j]; multilocusTotal += multilocusTable[i][j]; if (rowSum[i] == 0) rowSum[i] = 0.0001; if (colSum[j] == 0) colSum[j] = 0.0001; } } double multidprime = 0; boolean noDivByZero = false; for (int i = 0; i < rowSum.length; i++){ for (int j = 0; j < colSum.length; j++){ double num = (multilocusTable[i][j]/multilocusTotal) - (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom; if (num < 0){ double denom1 = (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(1.0 - (colSum[j]/multilocusTotal)); if (denom1 < denom2) { denom = denom1; }else{ denom = denom2; } }else{ double denom1 = (rowSum[i]/multilocusTotal)*(1.0 -(colSum[j]/multilocusTotal)); double denom2 = (1.0 - (rowSum[i]/multilocusTotal))*(colSum[j]/multilocusTotal); if (denom1 < denom2){ denom = denom1; }else{ denom = denom2; } } if (denom != 0){ noDivByZero = true; multidprime += (rowSum[i]/multilocusTotal)*(colSum[j]/multilocusTotal)*Math.abs(num/denom); } } } if (noDivByZero){ multidprimeArray[gap] = multidprime; }else{ multidprimeArray[gap] = 1.00; } } return haplos; }
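The loop at the end of the surrounding context computes a multiallelic (multilocus) D' between the haplotypes flanking each gap: for every cell it forms D_ij = p_ij - p_i*q_j, normalizes by the sign-dependent maximum, and sums the ratios weighted by p_i*q_j. Below is a standalone re-expression of that calculation, assuming a plain joint count table as input; the 0.0001 nudge for empty rows and columns is omitted and only the denom != 0 guard (also present in the original loop) is kept.

public class MultiDPrimeSketch {
    // Multiallelic D' over a joint count table, following the structure of the
    // loop in the contexts above: D' = sum_ij p_i * q_j * |D_ij / Dmax_ij|,
    // with D_ij = p_ij - p_i * q_j and Dmax_ij chosen by the sign of D_ij.
    static double multiDPrime(double[][] counts) {
        int rows = counts.length;
        int cols = counts[0].length;
        double[] rowSum = new double[rows];
        double[] colSum = new double[cols];
        double total = 0;
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                rowSum[i] += counts[i][j];
                colSum[j] += counts[i][j];
                total += counts[i][j];
            }
        }
        double dprime = 0;
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < cols; j++) {
                double pi = rowSum[i] / total;
                double qj = colSum[j] / total;
                double d = counts[i][j] / total - pi * qj;
                double dmax = (d < 0)
                        ? Math.min(pi * qj, (1 - pi) * (1 - qj))
                        : Math.min(pi * (1 - qj), (1 - pi) * qj);
                if (dmax != 0) {            // skip degenerate cells instead of nudging sums
                    dprime += pi * qj * Math.abs(d / dmax);
                }
            }
        }
        return dprime;
    }

    public static void main(String[] args) {
        double[][] counts = { { 40, 10 }, { 5, 45 } };
        System.out.println(multiDPrime(counts)); // about 0.78 for this table
    }
}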
Haplotype[][] results = new Haplotype[blocks.size()][];
Haplotype[][] rawHaplotypes = new Haplotype[blocks.size()][];
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno 
= nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
results[k] = new Haplotype[theEM.numHaplos()];
rawHaplotypes[k] = new Haplotype[theEM.numHaplos()];
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno 
= nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
results[k][z] = tempArray[z];
rawHaplotypes[k][z] = tempArray[z];
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno 
= nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
haplotypes = results;
haplotypes = new Haplotype[rawHaplotypes.length][]; for (int i = 0; i < rawHaplotypes.length; i++) { Vector orderedHaps = new Vector(); for (int hapCount = 0; hapCount < rawHaplotypes[i].length; hapCount++) { if (orderedHaps.size() == 0) { orderedHaps.add(rawHaplotypes[i][hapCount]); } else { for (int j = 0; j < orderedHaps.size(); j++) { if (((Haplotype)(orderedHaps.elementAt(j))).getPercentage() < rawHaplotypes[i][hapCount].getPercentage()) { orderedHaps.add(j, rawHaplotypes[i][hapCount]); break; } if ((j+1) == orderedHaps.size()) { orderedHaps.add(rawHaplotypes[i][hapCount]); break; } } } } haplotypes[i] = new Haplotype[orderedHaps.size()]; orderedHaps.copyInto(haplotypes[i]); } haplotypes = generateCrossovers(haplotypes); return haplotypes;
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno 
= nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
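The added block above replaces the raw EM output ordering with haplotypes sorted by descending frequency, built by inserting each one into a Vector at the first position whose occupant has a lower percentage. A minimal standalone sketch of that insertion pattern; HapLike is a hypothetical stand-in for Haplotype that exposes only the frequency accessor.

import java.util.Vector;

// Hypothetical stand-in for Haplotype; only the frequency accessor matters here.
class HapLike {
    private final double percentage;
    HapLike(double percentage) { this.percentage = percentage; }
    double getPercentage() { return percentage; }
}

public class OrderByFrequencySketch {
    // Insert each haplotype into a Vector kept in descending order of frequency,
    // mirroring the insertion loop in the added block above.
    static Vector orderByFrequency(HapLike[] raw) {
        Vector ordered = new Vector();
        for (int i = 0; i < raw.length; i++) {
            if (ordered.size() == 0) {
                ordered.add(raw[i]);
                continue;
            }
            for (int j = 0; j < ordered.size(); j++) {
                if (((HapLike) ordered.elementAt(j)).getPercentage() < raw[i].getPercentage()) {
                    ordered.add(j, raw[i]);   // insert before the first less-frequent entry
                    break;
                }
                if (j + 1 == ordered.size()) {
                    ordered.add(raw[i]);      // least frequent so far: append at the end
                    break;
                }
            }
        }
        return ordered;
    }

    public static void main(String[] args) {
        HapLike[] haps = { new HapLike(0.10), new HapLike(0.55), new HapLike(0.35) };
        Vector ordered = orderByFrequency(haps);
        for (int i = 0; i < ordered.size(); i++) {
            System.out.println(((HapLike) ordered.elementAt(i)).getPercentage()); // 0.55, 0.35, 0.1
        }
    }
}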
return results;
return rawHaplotypes;
Haplotype[][] generateHaplotypes(Vector blocks, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2) != null && dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); //int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno 
= nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } //if (tempPerc*100 > hapthresh){ tempArray[i] = new Haplotype(genos, returnedFreqs[i], preFiltBlock); //if we are performing association tests, then store the results if (Options.getAssocTest() == ASSOC_TRIO){ tempArray[i].setTransCount(theEM.getTransCount(i)); tempArray[i].setUntransCount(theEM.getUntransCount(i)); }else if (Options.getAssocTest() == ASSOC_CC){ tempArray[i].setCaseFreq(theEM.getCaseFreq(i)); tempArray[i].setControlFreq(theEM.getControlFreq(i)); } //p++; //} } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[theEM.numHaplos()]; for (int z = 0; z < theEM.numHaplos(); z++){ results[k][z] = tempArray[z]; } } if (!crossover){ haplotypes = results; } return results; }
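For blocks longer than 30 markers, the context above first collapses markers in perfect LD (pairwise r-squared of 1.0) into equivalence classes and phases only one representative per class, preferring the member with the fewest missing genotypes. The sketch below reproduces that reduction on illustrative inputs; the rsq matrix and missingRate array are assumptions standing in for the D' table and per-marker genotyping failure rates, not HaploView structures.

public class EquivalenceClassSketch {
    // Lump markers with pairwise r^2 == 1 against a class seed into one class,
    // then keep the member of each class with the lowest missing-genotype rate.
    static int[] pickRepresentatives(double[][] rsq, double[] missingRate) {
        int n = rsq.length;
        int[] classOf = new int[n];        // 0 means "not yet assigned"
        int classCount = 0;
        for (int x = 0; x < n; x++) {
            if (classOf[x] != 0) continue; // already lumped into a class
            classOf[x] = ++classCount;
            for (int y = x + 1; y < n; y++) {
                if (rsq[x][y] == 1.0) classOf[y] = classCount; // redundant SNP
            }
        }
        int[] reps = new int[classCount];
        for (int c = 0; c < classCount; c++) {
            double bestRate = 1.0;
            reps[c] = -1;
            for (int y = 0; y < n; y++) {
                if (classOf[y] == c + 1 && missingRate[y] <= bestRate) {
                    reps[c] = y;
                    bestRate = missingRate[y];
                }
            }
        }
        return reps;
    }

    public static void main(String[] args) {
        double[][] rsq = {
            { 1.0, 1.0, 0.2 },
            { 1.0, 1.0, 0.3 },
            { 0.2, 0.3, 1.0 },
        };
        double[] missingRate = { 0.05, 0.01, 0.00 };
        int[] reps = pickRepresentatives(rsq, missingRate);
        for (int i = 0; i < reps.length; i++) {
            System.out.println(reps[i]); // prints 1 then 2: marker 1 represents {0,1}
        }
    }
}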
registerPreferenceDialogTag("preferenceDialog", PreferenceDialogTag.class); registerTag("preferencePage", PreferencePageTag.class); registerFieldEditorTag("booleanFieldEditor", BooleanFieldEditor.class); registerFieldEditorTag("colorFieldEditor", ColorFieldEditor.class); registerFieldEditorTag("directoryFieldEditor", DirectoryFieldEditor.class); registerFieldEditorTag("fileFieldEditor", FileFieldEditor.class); registerFieldEditorTag("fontFieldEditor", FontFieldEditor.class); registerFieldEditorTag("integerFieldEditor", IntegerFieldEditor.class); registerFieldEditorTag("stringFieldEditor", StringFieldEditor.class);
public JFaceTagLibrary() { // Viewer tags registerViewerTag("tableViewer", TableViewer.class); registerViewerTag("tableTreeViewer", TableTreeViewer.class); registerViewerTag("treeViewer", TreeViewer.class); registerViewerTag("checkboxTreeViewer", CheckboxTreeViewer.class); // Event tags registerTag("doubleClickListener", DoubleClickListenerTag.class); registerTag( "selectionChangedListener", SelectionChangedListenerTag.class); // Window tags registerWindowTag("applicationWindow", ApplicationWindow.class); // ContributionManager tags registerMenuManager("menuManager", MenuManagerTag.class); // Action tags registerActionTag("action", ActionTag.class); // ContributionItem tags registerContributionItemTag("separator", Separator.class); }
public Tag createTag(String name, Attributes attributes) throws JellyException { return new MenuManagerTag();
public Tag createTag(String name, Attributes attributes) throws JellyException { return new ViewerTag(theClass, style);
public Tag createTag(String name, Attributes attributes) throws JellyException { return new MenuManagerTag(); }
public Tag createTag(String name, Attributes attributes) throws JellyException { return new ViewerTag(theclass, style);
public Tag createTag(String name, Attributes attributes) throws JellyException { return new MenuManagerTag();
public Tag createTag(String name, Attributes attributes) throws JellyException { return new ViewerTag(theclass, style); }
if (c instanceof Selectable) {
if (c instanceof Relationship) { ((Relationship) c).setSelected(((Relationship) c).intersects(rubberBand)); } else if (c instanceof Selectable) {
public void mouseMoved(MouseEvent evt) { if (rubberBand != null) { // repaint old region in case of shrinkage Rectangle dirtyRegion = zoomRect(new Rectangle(rubberBand)); Point p = unzoomPoint(new Point(evt.getPoint())); rubberBand.setBounds(rubberBandOrigin.x, rubberBandOrigin.y, 0, 0); rubberBand.add(p); // update selected items Rectangle temp = new Rectangle(); // avoids multiple allocations in getBounds for (int i = 0, n = contentPane.getComponentCount(); i < n; i++) { Component c = contentPane.getComponent(i); if (c instanceof Selectable) { ((Selectable) c).setSelected(rubberBand.intersects(c.getBounds(temp))); } } // Add the new rubberband to the dirty region and grow // it in case the line is thick due to extreme zoom dirtyRegion.add(zoomRect(new Rectangle(rubberBand))); dirtyRegion.x -= 3; dirtyRegion.y -= 3; dirtyRegion.width += 6; dirtyRegion.height += 6; repaint(dirtyRegion); } else { retargetToContentPane(evt); } }
Rectangle dirtyRegion = rubberBand;
Rectangle dirtyRegion = new Rectangle(rubberBand); dirtyRegion.width += (int) (dirtyRegion.width * 0.1); dirtyRegion.height += (int) (dirtyRegion.height * 0.1);
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(new Rectangle(dirtyRegion))); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
repaint(zoomRect(new Rectangle(dirtyRegion)));
repaint(zoomRect(dirtyRegion));
public void mouseReleased(MouseEvent evt) { if (rubberBand != null) { if (evt.getButton() == MouseEvent.BUTTON1) { Rectangle dirtyRegion = rubberBand; rubberBandOrigin = null; rubberBand = null; repaint(zoomRect(new Rectangle(dirtyRegion))); } } else if (!retargetToContentPane(evt)) { //((PlayPen) evt.getSource()).selectNone(); maybeShowPopup(evt); } }
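The two rows above switch from aliasing the rubber band rectangle to taking a copy, padding it by roughly ten percent, and repainting the copy. A minimal sketch of that copy-then-pad pattern; apart from rubberBand, the names below are illustrative rather than taken from PlayPen.

import java.awt.Rectangle;

public class DirtyRegionSketch {
    private Rectangle rubberBand = new Rectangle(10, 10, 100, 50);

    // Copy the shared Rectangle before enlarging it and clearing the field, so the
    // repaint region can be padded without mutating the rubber band object itself.
    Rectangle takeDirtyRegion() {
        Rectangle dirty = new Rectangle(rubberBand);   // defensive copy
        dirty.width  += (int) (dirty.width * 0.1);     // pad by ~10% in each dimension
        dirty.height += (int) (dirty.height * 0.1);
        rubberBand = null;                             // clear the shared state
        return dirty;
    }

    public static void main(String[] args) {
        System.out.println(new DirtyRegionSketch().takeDirtyRegion());
        // java.awt.Rectangle[x=10,y=10,width=110,height=55]
    }
}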
Composite backupComp = g2.getComposite(); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.3f)); g2.fillRect(rubberBand.x, rubberBand.y, rubberBand.width-1, rubberBand.height-1); g2.setComposite(backupComp);
public void paintComponent(Graphics g) { logger.debug("start of paintComponent, width="+getWidth()+",height="+getHeight()); Graphics2D g2 = (Graphics2D) g; g2.setColor(getBackground()); g2.fillRect(0, 0, getWidth(), getHeight()); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, antialiasSetting); if (logger.isDebugEnabled()) { Rectangle clip = g2.getClipBounds(); if (clip != null) { g2.setColor(Color.green); clip.width--; clip.height--; g2.draw(clip); g2.setColor(getBackground()); logger.debug("Clipping region: "+g2.getClip()); } else { logger.debug("Null clipping region"); } } Rectangle bounds = new Rectangle(); AffineTransform backup = g2.getTransform(); g2.scale(zoom, zoom); AffineTransform zoomedOrigin = g2.getTransform(); // counting down so visual z-order matches click detection z-order for (int i = contentPane.getComponentCount() - 1; i >= 0; i--) { Component c = contentPane.getComponent(i); c.getBounds(bounds); if (c.isVisible() && g2.hitClip(bounds.x, bounds.y, bounds.width, bounds.height)) { if (logger.isDebugEnabled()) logger.debug("Painting visible component "+c); g2.translate(c.getLocation().x, c.getLocation().y); c.paint(g2); g2.setTransform(zoomedOrigin); } else { if (logger.isDebugEnabled()) logger.debug("SKIPPING "+c); } } if (rubberBand != null && !rubberBand.isEmpty()) { if (logger.isDebugEnabled()) logger.debug("painting rubber band "+rubberBand); g2.setColor(rubberBandColor); g2.drawRect(rubberBand.x, rubberBand.y, rubberBand.width-1, rubberBand.height-1); } g2.setTransform(backup); logger.debug("end of paintComponent, width="+getWidth()+",height="+getHeight()); }
r.setSelected(false);
public void dbChildrenRemoved(SQLObjectEvent e) { logger.debug("SQLObject children got removed: "+e); boolean fireEvent = false; SQLObject o = e.getSQLSource(); SQLObject[] c = e.getChildren(); for (int i = 0; i < c.length; i++) { try { ArchitectUtils.unlistenToHierarchy(this, c[i]); } catch (ArchitectException ex) { logger.error("Couldn't unlisten to removed object", ex); } if (c[i] instanceof SQLTable) { for (int j = 0; j < contentPane.getComponentCount(); j++) { if (contentPane.getComponent(j) instanceof TablePane) { TablePane tp = (TablePane) contentPane.getComponent(j); if (tp.getModel() == c[i]) { contentPane.remove(j); fireEvent = true; } } } } else if (c[i] instanceof SQLRelationship) { for (int j = 0; j < contentPane.getComponentCount(); j++) { if (contentPane.getComponent(j) instanceof Relationship) { Relationship r = (Relationship) contentPane.getComponent(j); if (r.getModel() == c[i]) { contentPane.remove(j); fireEvent = true; } } } } } if (fireEvent) { firePropertyChange("model.children", null, null); repaint(); } }
StringTokenizer enum = new StringTokenizer( text, "," );
StringTokenizer pointEnum = new StringTokenizer( text, "," );
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() );
if ( pointEnum.hasMoreTokens() ) { x = parseNumber( pointEnum.nextToken() );
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() );
if ( pointEnum.hasMoreTokens() ) { y = parseNumber( pointEnum.nextToken() );
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
return new Point( x, y ); } return null; }
public Object convert(Class type, Object value) { if ( value != null ) { String text = value.toString(); StringTokenizer enum = new StringTokenizer( text, "," ); int x = 0; int y = 0; if ( enum.hasMoreTokens() ) { x = parseNumber( enum.nextToken() ); } if ( enum.hasMoreTokens() ) { y = parseNumber( enum.nextToken() ); }
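The renames in these converter records are forced by the language: enum became a reserved word in Java 5, so the old identifier no longer compiles. A sketch of the converter body as it reads after the rename (parseNumber is assumed to behave as in the records above):
// Sketch only: parse "x,y" into a java.awt.Point using the renamed tokenizer.
StringTokenizer pointEnum = new StringTokenizer(text, ",");
int x = 0;
int y = 0;
if (pointEnum.hasMoreTokens()) { x = parseNumber(pointEnum.nextToken()); }
if (pointEnum.hasMoreTokens()) { y = parseNumber(pointEnum.nextToken()); }
return new Point(x, y);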
this.dynaBean = new ConvertingWrapDynaBean(fileScanner);
public FileScannerTag(FileScanner fileScanner) { this.fileScanner = fileScanner; this.dynaBean = new ConvertingWrapDynaBean(fileScanner); }
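ConvertingWrapDynaBean comes from commons-beanutils; it wraps a plain bean so that values set through the DynaBean interface are converted to the target property's type, which is presumably why the tag wraps its FileScanner here (tag attributes arrive as strings). A hedged usage sketch with a hypothetical bean and property name:
// Sketch only; MyBean and its int property "size" are hypothetical.
import org.apache.commons.beanutils.ConvertingWrapDynaBean;
import org.apache.commons.beanutils.DynaBean;
DynaBean wrapped = new ConvertingWrapDynaBean(new MyBean());
wrapped.set("size", "42"); // the String "42" is converted to the property's int type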
byte[] zeroArray = {0,0};
public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(infile); }else{ pedFile.parseHapMap(infile); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); byte[] zeroArray = {0,0}; //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadTb = new byte[numMarkers]; byte[] dadUb = new byte[numMarkers]; byte[] momTb = new byte[numMarkers]; byte[] momUb = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadTb[i] = dad1; dadUb[i] = dad1; } else if (dad1 != 0 && dad2 != 0) { dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); } if (mom1 == mom2) { momTb[i] = mom1; momUb[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadTb[i] = kid1; dadUb[i] = 0; } else if (dad1 == kid1) { dadTb[i] = dad1; dadUb[i] = dad2; } else { dadTb[i] = dad2; dadUb[i] = dad1; } if (mom1 == 0) { momTb[i] = kid1; momUb[i] = 0; } else if (mom1 == kid1) { momTb[i] = mom1; momUb[i] = mom2; } else { momTb[i] = mom2; momUb[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadTb[i] = 0; dadUb[i] = 0; momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadTb[i] = 0; dadUb[i] = 0; momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = 0; momUb[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momTb[i] = mom1; momUb[i] = mom1; dadUb[i] = 0; if (kid1 == mom1) { dadTb[i] = kid2; } else { dadTb[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadTb[i] = dad1; dadUb[i] = dad1; momUb[i] = 0; if (kid1 == dad1) { momTb[i] = kid2; } else { momTb[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadTb[i] = dad1; dadUb[i] = dad2; if (kid1 == dad1) { momTb[i] = kid2; momUb[i] = kid1; } else { momTb[i] = kid1; momUb[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momTb[i] = mom1; momUb[i] = mom2; if (kid1 == mom1) { dadTb[i] = kid2; dadUb[i] = kid1; } else { dadTb[i] = kid1; dadUb[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadTb[i] = dad1; dadUb[i] = dad1; momTb[i] = mom1; momUb[i] = mom1; } else { //everybody het dadTb[i] = (byte)(4+dad1); dadUb[i] = (byte)(4+dad2); momTb[i] = (byte)(4+mom1); momUb[i] = (byte)(4+mom2); } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadTb, dad.getAffectedStatus(), currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadUb, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momTb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momUb, mom.getAffectedStatus(),currentInd.getAffectedStatus())); numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte[] thisMarker; if (currentInd.getZeroed(i)){ thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; }
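Both the trio branch and the singleton branch above store genotypes with the same convention: missing (0) and homozygous alleles keep their raw codes, while heterozygous alleles are stored offset by 4 so later phasing can treat them as ambiguous. A hypothetical helper expressing just that rule:
// Sketch only -- not part of the class above. Encodes one allele of a genotype
// using the convention in linkageToChrom: raw value if missing or homozygous,
// 4 + allele if the genotype is heterozygous.
static byte encodeAllele(byte a1, byte a2, boolean first) {
    byte a = first ? a1 : a2;
    if (a1 == 0 || a2 == 0 || a1 == a2) {
        return a;          // missing or unambiguous
    }
    return (byte) (4 + a); // heterozygous: flag as ambiguous
}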