rem
stringlengths
0
477k
add
stringlengths
0
313k
context
stringlengths
6
599k
noImage = true;
public void colorDPrime(int scheme){ currentScheme = scheme; PairwiseLinkage dPrime[][] = theData.filteredDPrimeTable; if (scheme == STD_SCHEME){ // set coloring based on LOD and D' for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double d = thisPair.getDPrime(); double l = thisPair.getLOD(); Color boxColor = null; if (l > 2) { if (d < 0.5) { //high LOD, low D' boxColor = new Color(255, 224, 224); } else { //high LOD, high D' shades of red double blgr = (255-32)*2*(1-d); boxColor = new Color(255, (int) blgr, (int) blgr); //boxColor = new Color(224, (int) blgr, (int) blgr); } } else if (d > 0.99) { //high D', low LOD blueish color boxColor = new Color(192, 192, 240); } else { //no LD boxColor = Color.white; } thisPair.setColor(boxColor); } } }else if (scheme == SFS_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); //color in squares if (lowCI >= FindBlocks.cutLowCI && highCI >= FindBlocks.cutHighCI) { thisPair.setColor(Color.darkGray); //strong LD }else if (highCI >= FindBlocks.recHighCI) { thisPair.setColor(Color.lightGray); //uninformative } else { thisPair.setColor(Color.white); //recomb } } } }else if (scheme == GAM_SCHEME){ for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null) { continue; } double[] freqs = thisPair.getFreqs(); int numGam = 0; for (int i = 0; i < freqs.length; i++){ //add a little bump for EM probs which should be zero but are really like 10^-10 if (freqs[i] > FindBlocks.fourGameteCutoff + 1E-8) numGam++; } //color in squares if(numGam > 3){ thisPair.setColor(Color.white); }else{ thisPair.setColor(Color.darkGray); } } } }else if (scheme == WMF_SCHEME){ // set coloring based on LOD and D', but without (arbitrary) cutoffs to introduce // "color damage" (Tufte) // first get the maximum LOD score so we can scale relative to that. double max_l = 0.0; for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } if (thisPair.getLOD() > max_l) max_l = thisPair.getLOD(); } } // cap the max LOD score if (max_l > 5.0) max_l = 5.0; for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double d = thisPair.getDPrime(); double l = thisPair.getLOD(); Color boxColor = null; double lod_scale = l / max_l; // if greater than the cap, call it the cap if (lod_scale > 1.0) lod_scale = 1.0; // there can be negative LOD scores, apparently if (lod_scale < 0.0) lod_scale = 0.0; // also, scale the D' so anything under .2 is white. d = (1.0 / 0.8) * (d - 0.2); if (d < 0.0) d = 0.0; // if there is low(er) D' but big LOD score, this should be in a gray scale // scaled to the D' value if (lod_scale > d) { lod_scale = d; } int r, g, b; // r = (int)(200.0 * d + 55.0 * lod_scale); // g = (int)(255.0 * d - 255.0 * lod_scale); // b = (int)(255.0 * d - 255.0 * lod_scale); double ap, cp, dp, ep, jp, kp; ap = 0.0; cp = -255.0; dp = -55.0; ep = -200.0; jp = 255.0; kp = 255.0; r = (int)(ap * d + cp * lod_scale + jp); g = b = (int)(dp * d + ep * lod_scale + kp); if (r < 0) r = 0; if (g < 0) g = 0; if (b < 0) b = 0; boxColor = new Color(r, g, b); thisPair.setColor(boxColor); } } }else if (scheme == RSQ_SCHEME){ // set coloring based on R-squared values for (int i = 0; i < dPrime.length; i++){ for (int j = i+1; j < dPrime[i].length; j++){ PairwiseLinkage thisPair = dPrime[i][j]; if (thisPair == null){ continue; } double rsq = thisPair.getRSquared(); Color boxColor = null; int r, g, b; r = g = b = (int)(255.0 * (1.0 - rsq)); boxColor = new Color(r, g, b); thisPair.setColor(boxColor); } } } }
processData(theData.getPedFile().getHMInfo());
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File inFile = new File(inputOptions[0]); try { if (inFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + inFile.getName()); } theData = new HaploData(assocTest); theData.linkageToChrom(inFile, type); processData(theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData.getPedFile()); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
processData(theData.getPedFile().getHMInfo());
void readPedGenotypes(String[] f, int type){ //input is a 3 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = max comparison distance (don't compute d' if markers are greater than this dist apart) //type is either 3 or 4 for ped and hapmap files respectively inputOptions = f; File inFile = new File(inputOptions[0]); try { if (inFile.length() < 1){ throw new HaploViewException("Pedfile is empty or nonexistent: " + inFile.getName()); } theData = new HaploData(assocTest); theData.linkageToChrom(inFile, type); processData(theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData.getPedFile()); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
g2.setColor((val < 50) ? Color.gray : Color.black); if (boxColor.getGreen() < 100 && boxColor.getBlue() < 100 && boxColor.getRed() < 100){
if (boxColor.getGreen() < 175 && boxColor.getBlue() < 175 && boxColor.getRed() < 175){
public void paintComponent(Graphics g){ DPrimeTable dPrimeTable = theData.dpTable; if (Chromosome.getSize() < 2){ //if there zero or only one valid marker return; } Vector blocks = theData.blocks; Rectangle visRect = getVisibleRect(); //deal with zooming if (chartSize.getWidth() > (3*visRect.width)){ showWM = true; }else{ showWM = false; } boolean printValues = true; if (zoomLevel != 0 || Options.getPrintWhat() == LD_NONE){ printValues = false; } printWhat = Options.getPrintWhat(); Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); g2.setColor(BG_GREY); //if it's a big dataset, resize properly, if it's small make sure to fill whole background if (size.height < pref.height){ g2.fillRect(0,0,pref.width,pref.height); setSize(pref); }else{ g2.fillRect(0,0,size.width, size.height); } g2.setColor(Color.black); //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. if (!forExport){ if (!theData.infoKnown){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); } else { g2.translate((size.width - pref.width) / 2, 0); } } FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; double lineSpan = alignedPositions[alignedPositions.length-1] - alignedPositions[0]; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition(); double spanpos = maxpos - minpos; //See http://www.hapmap.org/cgi-perl/gbrowse/gbrowse_img //for more info on GBrowse img. int imgHeight = 0; if (Options.isGBrowseShown() && Chromosome.getDataChrom() != null && !Chromosome.getDataChrom().equalsIgnoreCase("none")){ g2.drawImage(gBrowseImage,H_BORDER-GBROWSE_MARGIN,V_BORDER,this); imgHeight = gBrowseImage.getHeight(this) + TRACK_GAP; // get height so we can shift everything down } left = H_BORDER; top = V_BORDER + imgHeight; // push the haplotype display down to make room for gbrowse image. if (forExport){ left -= exportStart * boxSize; } FontMetrics metrics; int ascent; g2.setFont(boldMarkerNameFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = getBoundaryMarker(visRect.x-clickXShift-(visRect.y +visRect.height-clickYShift)) - 1; highX = getBoundaryMarker(visRect.x + visRect.width); lowY = getBoundaryMarker((visRect.x-clickXShift)+(visRect.y-clickYShift)) - 1; highY = getBoundaryMarker((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height)); if (lowX < 0) { lowX = 0; } if (highX > Chromosome.getSize()-1){ highX = Chromosome.getSize()-1; } if (lowY < lowX+1){ lowY = lowX+1; } if (highY > Chromosome.getSize()){ highY = Chromosome.getSize(); } if (forExport){ lowX = exportStart; lowY = exportStart; highX = exportStop; highY = exportStop+1; } if (theData.trackExists){ //draw the analysis track above where the marker positions will be marked JFreeChart jfc = ChartFactory.createXYLineChart(null,null,null, theData.analysisTracks, PlotOrientation.VERTICAL,false,false,false); //customise the analysis track XYPlot xyp = (XYPlot)jfc.getPlot(); //no x axis, since it takes up too much space. xyp.getDomainAxis().setAxisLineVisible(false); xyp.getDomainAxis().setTickLabelsVisible(false); xyp.getDomainAxis().setTickMarksVisible(false); //x range must align with markers xyp.getDomainAxis().setRange(minpos,maxpos); //size of the axis and graph inset double axisWidth = xyp.getRangeAxis(). reserveSpace(g2,xyp,new Rectangle(0,TRACK_HEIGHT),RectangleEdge.LEFT,null).getLeft(); RectangleInsets insets = xyp.getInsets(); jfc.setBackgroundPaint(BG_GREY); BufferedImage bi = jfc.createBufferedImage( (int)(lineSpan + axisWidth + insets.getLeft() + insets.getRight()),TRACK_HEIGHT); //hide the axis in the margin so everything lines up. g2.drawImage(bi,(int)(left - axisWidth - insets.getLeft()),top,this); top += TRACK_HEIGHT + TRACK_GAP; } if (theData.infoKnown) { Color green = new Color(0, 127, 0); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fill(new Rectangle2D.Double(left+1, top+1, lineSpan-1, TICK_HEIGHT-1)); g2.setColor(Color.black); g2.draw(new Rectangle2D.Double(left, top, lineSpan, TICK_HEIGHT)); for (int i = 0; i < Chromosome.getSize(); i++){ double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; double xx = left + lineSpan*pos; // if we're zoomed, use the line color to indicate whether there is extra data available // (since the marker names are not displayed when zoomed) if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(green); //draw tick g2.setStroke(thickerStroke); g2.draw(new Line2D.Double(xx, top, xx, top + TICK_HEIGHT)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setStroke(thickerStroke); else g2.setStroke(thinnerStroke); //draw connecting line g2.draw(new Line2D.Double(xx, top + TICK_HEIGHT, left + alignedPositions[i], top+TICK_BOTTOM)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(Color.black); } top += TICK_BOTTOM + TICK_HEIGHT; //// draw the marker names if (printMarkerNames){ widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getDisplayName()); for (int x = 1; x < Chromosome.getSize(); x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getDisplayName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); boolean foundSNP = false; for (int x = 0; x < Chromosome.getSize(); x++) { if (theData.isInBlock[x]){ g2.setFont(boldMarkerNameFont); }else{ g2.setFont(markerNameFont); } if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(green); if (theHV != null){ if (Chromosome.getMarker(x).getDisplayName().equals(theHV.getChosenMarker())){ g2.setColor(Color.blue); foundSNP = true; } } g2.drawString(Chromosome.getMarker(x).getDisplayName(),(float)TEXT_GAP, (float)alignedPositions[x] + ascent/3); if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(Color.black); if (foundSNP){ g2.setColor(Color.BLACK); foundSNP = false; } } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } top += blockDispHeight; //// draw the marker numbers if (printMarkerNames){ g2.setFont(markerNumFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < Chromosome.getSize(); x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g2.drawString(mark, (float)(left + alignedPositions[x] - metrics.stringWidth(mark)/2), (float)(top + ascent)); } top += boxRadius/2; // give a little space between numbers and boxes } //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(theData.infoKnown)){ clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable.getLDStats(x,y) == null){ continue; } double d = dPrimeTable.getLDStats(x,y).getDPrime(); double r = dPrimeTable.getLDStats(x,y).getRSquared(); //double l = dPrimeTable.getLDStats(x,y).getLOD(); Color boxColor = dPrimeTable.getLDStats(x,y).getColor(); // draw markers above int xx = left + (int)((alignedPositions[x] + alignedPositions[y])/2); int yy = top + (int)((alignedPositions[y] - alignedPositions[x]) / 2); diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g2.setColor(boxColor); g2.fillPolygon(diamond); if(printValues){ g2.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val; if (printWhat == D_PRIME){ val = (int) (d * 100); }else if (printWhat == R_SQ){ val = (int) (r * 100); }else{ val = 100; } g2.setColor((val < 50) ? Color.gray : Color.black); if (boxColor.getGreen() < 100 && boxColor.getBlue() < 100 && boxColor.getRed() < 100){ g2.setColor(Color.white); } if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } //highlight blocks g2.setFont(markerNameFont); ascent = g2.getFontMetrics().getAscent(); //g.setColor(new Color(153,255,153)); g2.setColor(Color.black); //g.setColor(new Color(51,153,51)); for (int i = 0; i < blocks.size(); i++){ int[] theBlock = (int[])blocks.elementAt(i); int first = theBlock[0]; int last = theBlock[theBlock.length-1]; //big vee around whole thing g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first] - boxRadius, top, left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius)); g2.draw(new Line2D.Double(left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius, left + alignedPositions[last] + boxRadius, top)); for (int j = first; j < last; j++){ g2.setStroke(fatStroke); if (theData.isInBlock[j]){ g2.draw(new Line2D.Double(left+alignedPositions[j]-boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); }else{ g2.draw(new Line2D.Double(left + alignedPositions[j] + boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); g2.setStroke(dashedFatStroke); g2.draw(new Line2D.Double(left+alignedPositions[j] - boxSize/2, top-blockDispHeight, left+alignedPositions[j] + boxSize/2, top-blockDispHeight)); } } //cap off the end of the block g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left+alignedPositions[last]-boxSize/2, top-blockDispHeight, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); //lines to connect to block display g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first]-boxSize/2, top-1, left+alignedPositions[first]-boxSize/2, top-blockDispHeight)); g2.draw(new Line2D.Double(left+alignedPositions[last]+boxSize/2, top-1, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); if (printMarkerNames){ String labelString = new String ("Block " + (i+1)); if (theData.infoKnown){ long blockSize = Chromosome.getMarker(last).getPosition() - Chromosome.getMarker(first).getPosition(); labelString += " (" + blockSize/1000 + " kb)"; } g2.drawString(labelString, (float)(left+alignedPositions[first]-boxSize/2+TEXT_GAP), (float)(top-boxSize/3)); } } g2.setStroke(thickerStroke); if (showWM && !forExport){ //dataset is big enough to require worldmap if (wmMaxWidth == 0){ wmMaxWidth = visRect.width/3; } double scalefactor; scalefactor = (double)(chartSize.width)/wmMaxWidth; double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth))); //stick WM_BD in the middle of the blank space at the top of the worldmap final int WM_BD_GAP = (int)(infoHeight/(scalefactor*2)); final int WM_BD_HEIGHT = 2; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); if (noImage){ //first time through draw a worldmap if dataset is big: worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1); //make a pretty border gw2.setColor(Color.black); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight()); wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(), worldmap.getHeight()); float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < Chromosome.getSize()-1; x++){ for (int y = x+1; y < Chromosome.getSize(); y++){ if (dPrimeTable.getLDStats(x,y) == null){ continue; } double xx = ((alignedPositions[y] + alignedPositions[x])/(scalefactor*2)) + wmBorder.getBorderInsets(this).left; double yy = ((alignedPositions[y] - alignedPositions[x] + infoHeight*2)/(scalefactor*2)) + wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable.getLDStats(x,y).getColor()); gw2.fill(gp); } } noImage = false; } //draw block display in worldmap Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(wmBorder.getBorderInsets(this).left, wmBorder.getBorderInsets(this).top+WM_BD_GAP, wmInteriorRect.width, WM_BD_HEIGHT); gw2.setColor(Color.black); boolean even = true; for (int i = 0; i < blocks.size(); i++){ int first = ((int[])blocks.elementAt(i))[0]; int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1]; int voffset; if (even){ voffset = 0; }else{ voffset = WM_BD_HEIGHT/2; } gw2.fillRect(wmBorder.getBorderInsets(this).left - (int)prefBoxSize/2 + (int)(alignedPositions[first]/scalefactor), wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP, (int)(prefBoxSize + (alignedPositions[last] - alignedPositions[first])/scalefactor), WM_BD_HEIGHT/2); even = !even; } wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2, visRect.y + visRect.height - worldmap.getHeight(), (worldmap.getWidth()-wmInteriorRect.width)/2, (worldmap.getHeight() -wmInteriorRect.height)/2); g2.drawImage(worldmap,visRect.x, visRect.y + visRect.height - worldmap.getHeight(), this); wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width)/2; wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() + (worldmap.getHeight() - wmInteriorRect.height)/2; //draw the outline of the viewport g2.setColor(Color.black); double hRatio = wmInteriorRect.getWidth()/pref.getWidth(); double vRatio = wmInteriorRect.getHeight()/pref.getHeight(); int hBump = worldmap.getWidth()-wmInteriorRect.width; int vBump = worldmap.getHeight()-wmInteriorRect.height; //bump a few pixels to avoid drawing on the border g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x, (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()), (int)(visRect.width*hRatio), (int)(visRect.height*vRatio)); } //see if the user has right-clicked to popup some marker info if(popupDrawRect != null){ //dumb bug where little datasets popup the box in the wrong place int smallDatasetSlopH = 0; int smallDatasetSlopV = 0; if (pref.getHeight() < visRect.height){ smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2; } if (pref.getWidth() < visRect.width){ smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2; } g2.setColor(Color.white); g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH, popupDrawRect.y+1-smallDatasetSlopV, popupDrawRect.width-1, popupDrawRect.height-1); g2.setColor(Color.black); g2.drawRect(popupDrawRect.x-smallDatasetSlopH, popupDrawRect.y-smallDatasetSlopV, popupDrawRect.width, popupDrawRect.height); g.setFont(popupFont); for (int x = 0; x < displayStrings.size(); x++){ g.drawString((String)displayStrings.elementAt(x),popupDrawRect.x + popupLeftMargin-smallDatasetSlopH, popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV); } } // draw the cached last right-click selection // The purpose of testing for empty string is just to avoid an 2-unit empty white box if (lastSelection != null){ if ((zoomLevel == 0) && (!lastSelection.equals("")) && (!forExport)) { g2.setFont(boxFont); // a bit extra on all side int last_descent = g2.getFontMetrics().getDescent(); int last_box_x = (visRect.x + LAST_SELECTION_LEFT) - 2; int last_box_y = (visRect.y - g2.getFontMetrics().getHeight() + LAST_SELECTION_TOP + last_descent) - 1 ; int last_box_width = g2.getFontMetrics().stringWidth(lastSelection) + 4; int last_box_height = g2.getFontMetrics().getHeight() + 2; g2.setColor(Color.white); g2.fillRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.setColor(Color.black); g2.drawRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.drawString(lastSelection, LAST_SELECTION_LEFT + visRect.x, LAST_SELECTION_TOP + visRect.y); } } //see if we're drawing a worldmap resize rect if (resizeWMRect != null){ g2.setColor(Color.black); g2.drawRect(resizeWMRect.x, resizeWMRect.y, resizeWMRect.width, resizeWMRect.height); } //see if we're drawing a block selector rect if (blockRect != null){ g2.setColor(Color.black); g2.setStroke(dashedThinStroke); g2.drawRect(blockRect.x, blockRect.y, blockRect.width, blockRect.height); } }
}else{ g2.setColor((val < 50) ? Color.gray : Color.black);
public void paintComponent(Graphics g){ DPrimeTable dPrimeTable = theData.dpTable; if (Chromosome.getSize() < 2){ //if there zero or only one valid marker return; } Vector blocks = theData.blocks; Rectangle visRect = getVisibleRect(); //deal with zooming if (chartSize.getWidth() > (3*visRect.width)){ showWM = true; }else{ showWM = false; } boolean printValues = true; if (zoomLevel != 0 || Options.getPrintWhat() == LD_NONE){ printValues = false; } printWhat = Options.getPrintWhat(); Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); g2.setColor(BG_GREY); //if it's a big dataset, resize properly, if it's small make sure to fill whole background if (size.height < pref.height){ g2.fillRect(0,0,pref.width,pref.height); setSize(pref); }else{ g2.fillRect(0,0,size.width, size.height); } g2.setColor(Color.black); //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. if (!forExport){ if (!theData.infoKnown){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); } else { g2.translate((size.width - pref.width) / 2, 0); } } FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; double lineSpan = alignedPositions[alignedPositions.length-1] - alignedPositions[0]; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition(); double spanpos = maxpos - minpos; //See http://www.hapmap.org/cgi-perl/gbrowse/gbrowse_img //for more info on GBrowse img. int imgHeight = 0; if (Options.isGBrowseShown() && Chromosome.getDataChrom() != null && !Chromosome.getDataChrom().equalsIgnoreCase("none")){ g2.drawImage(gBrowseImage,H_BORDER-GBROWSE_MARGIN,V_BORDER,this); imgHeight = gBrowseImage.getHeight(this) + TRACK_GAP; // get height so we can shift everything down } left = H_BORDER; top = V_BORDER + imgHeight; // push the haplotype display down to make room for gbrowse image. if (forExport){ left -= exportStart * boxSize; } FontMetrics metrics; int ascent; g2.setFont(boldMarkerNameFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = getBoundaryMarker(visRect.x-clickXShift-(visRect.y +visRect.height-clickYShift)) - 1; highX = getBoundaryMarker(visRect.x + visRect.width); lowY = getBoundaryMarker((visRect.x-clickXShift)+(visRect.y-clickYShift)) - 1; highY = getBoundaryMarker((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height)); if (lowX < 0) { lowX = 0; } if (highX > Chromosome.getSize()-1){ highX = Chromosome.getSize()-1; } if (lowY < lowX+1){ lowY = lowX+1; } if (highY > Chromosome.getSize()){ highY = Chromosome.getSize(); } if (forExport){ lowX = exportStart; lowY = exportStart; highX = exportStop; highY = exportStop+1; } if (theData.trackExists){ //draw the analysis track above where the marker positions will be marked JFreeChart jfc = ChartFactory.createXYLineChart(null,null,null, theData.analysisTracks, PlotOrientation.VERTICAL,false,false,false); //customise the analysis track XYPlot xyp = (XYPlot)jfc.getPlot(); //no x axis, since it takes up too much space. xyp.getDomainAxis().setAxisLineVisible(false); xyp.getDomainAxis().setTickLabelsVisible(false); xyp.getDomainAxis().setTickMarksVisible(false); //x range must align with markers xyp.getDomainAxis().setRange(minpos,maxpos); //size of the axis and graph inset double axisWidth = xyp.getRangeAxis(). reserveSpace(g2,xyp,new Rectangle(0,TRACK_HEIGHT),RectangleEdge.LEFT,null).getLeft(); RectangleInsets insets = xyp.getInsets(); jfc.setBackgroundPaint(BG_GREY); BufferedImage bi = jfc.createBufferedImage( (int)(lineSpan + axisWidth + insets.getLeft() + insets.getRight()),TRACK_HEIGHT); //hide the axis in the margin so everything lines up. g2.drawImage(bi,(int)(left - axisWidth - insets.getLeft()),top,this); top += TRACK_HEIGHT + TRACK_GAP; } if (theData.infoKnown) { Color green = new Color(0, 127, 0); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fill(new Rectangle2D.Double(left+1, top+1, lineSpan-1, TICK_HEIGHT-1)); g2.setColor(Color.black); g2.draw(new Rectangle2D.Double(left, top, lineSpan, TICK_HEIGHT)); for (int i = 0; i < Chromosome.getSize(); i++){ double pos = (Chromosome.getMarker(i).getPosition() - minpos) / spanpos; double xx = left + lineSpan*pos; // if we're zoomed, use the line color to indicate whether there is extra data available // (since the marker names are not displayed when zoomed) if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(green); //draw tick g2.setStroke(thickerStroke); g2.draw(new Line2D.Double(xx, top, xx, top + TICK_HEIGHT)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setStroke(thickerStroke); else g2.setStroke(thinnerStroke); //draw connecting line g2.draw(new Line2D.Double(xx, top + TICK_HEIGHT, left + alignedPositions[i], top+TICK_BOTTOM)); if (Chromosome.getMarker(i).getExtra() != null && zoomLevel != 0) g2.setColor(Color.black); } top += TICK_BOTTOM + TICK_HEIGHT; //// draw the marker names if (printMarkerNames){ widestMarkerName = metrics.stringWidth(Chromosome.getMarker(0).getDisplayName()); for (int x = 1; x < Chromosome.getSize(); x++) { int thiswide = metrics.stringWidth(Chromosome.getMarker(x).getDisplayName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); boolean foundSNP = false; for (int x = 0; x < Chromosome.getSize(); x++) { if (theData.isInBlock[x]){ g2.setFont(boldMarkerNameFont); }else{ g2.setFont(markerNameFont); } if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(green); if (theHV != null){ if (Chromosome.getMarker(x).getDisplayName().equals(theHV.getChosenMarker())){ g2.setColor(Color.blue); foundSNP = true; } } g2.drawString(Chromosome.getMarker(x).getDisplayName(),(float)TEXT_GAP, (float)alignedPositions[x] + ascent/3); if (Chromosome.getMarker(x).getExtra() != null) g2.setColor(Color.black); if (foundSNP){ g2.setColor(Color.BLACK); foundSNP = false; } } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } top += blockDispHeight; //// draw the marker numbers if (printMarkerNames){ g2.setFont(markerNumFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < Chromosome.getSize(); x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g2.drawString(mark, (float)(left + alignedPositions[x] - metrics.stringWidth(mark)/2), (float)(top + ascent)); } top += boxRadius/2; // give a little space between numbers and boxes } //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(theData.infoKnown)){ clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable.getLDStats(x,y) == null){ continue; } double d = dPrimeTable.getLDStats(x,y).getDPrime(); double r = dPrimeTable.getLDStats(x,y).getRSquared(); //double l = dPrimeTable.getLDStats(x,y).getLOD(); Color boxColor = dPrimeTable.getLDStats(x,y).getColor(); // draw markers above int xx = left + (int)((alignedPositions[x] + alignedPositions[y])/2); int yy = top + (int)((alignedPositions[y] - alignedPositions[x]) / 2); diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g2.setColor(boxColor); g2.fillPolygon(diamond); if(printValues){ g2.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val; if (printWhat == D_PRIME){ val = (int) (d * 100); }else if (printWhat == R_SQ){ val = (int) (r * 100); }else{ val = 100; } g2.setColor((val < 50) ? Color.gray : Color.black); if (boxColor.getGreen() < 100 && boxColor.getBlue() < 100 && boxColor.getRed() < 100){ g2.setColor(Color.white); } if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } //highlight blocks g2.setFont(markerNameFont); ascent = g2.getFontMetrics().getAscent(); //g.setColor(new Color(153,255,153)); g2.setColor(Color.black); //g.setColor(new Color(51,153,51)); for (int i = 0; i < blocks.size(); i++){ int[] theBlock = (int[])blocks.elementAt(i); int first = theBlock[0]; int last = theBlock[theBlock.length-1]; //big vee around whole thing g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first] - boxRadius, top, left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius)); g2.draw(new Line2D.Double(left + (alignedPositions[first] + alignedPositions[last])/2, top + (alignedPositions[last] - alignedPositions[first])/2 + boxRadius, left + alignedPositions[last] + boxRadius, top)); for (int j = first; j < last; j++){ g2.setStroke(fatStroke); if (theData.isInBlock[j]){ g2.draw(new Line2D.Double(left+alignedPositions[j]-boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); }else{ g2.draw(new Line2D.Double(left + alignedPositions[j] + boxSize/2, top-blockDispHeight, left+alignedPositions[j+1]-boxSize/2, top-blockDispHeight)); g2.setStroke(dashedFatStroke); g2.draw(new Line2D.Double(left+alignedPositions[j] - boxSize/2, top-blockDispHeight, left+alignedPositions[j] + boxSize/2, top-blockDispHeight)); } } //cap off the end of the block g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left+alignedPositions[last]-boxSize/2, top-blockDispHeight, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); //lines to connect to block display g2.setStroke(fatStroke); g2.draw(new Line2D.Double(left + alignedPositions[first]-boxSize/2, top-1, left+alignedPositions[first]-boxSize/2, top-blockDispHeight)); g2.draw(new Line2D.Double(left+alignedPositions[last]+boxSize/2, top-1, left+alignedPositions[last]+boxSize/2, top-blockDispHeight)); if (printMarkerNames){ String labelString = new String ("Block " + (i+1)); if (theData.infoKnown){ long blockSize = Chromosome.getMarker(last).getPosition() - Chromosome.getMarker(first).getPosition(); labelString += " (" + blockSize/1000 + " kb)"; } g2.drawString(labelString, (float)(left+alignedPositions[first]-boxSize/2+TEXT_GAP), (float)(top-boxSize/3)); } } g2.setStroke(thickerStroke); if (showWM && !forExport){ //dataset is big enough to require worldmap if (wmMaxWidth == 0){ wmMaxWidth = visRect.width/3; } double scalefactor; scalefactor = (double)(chartSize.width)/wmMaxWidth; double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth))); //stick WM_BD in the middle of the blank space at the top of the worldmap final int WM_BD_GAP = (int)(infoHeight/(scalefactor*2)); final int WM_BD_HEIGHT = 2; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); if (noImage){ //first time through draw a worldmap if dataset is big: worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1); //make a pretty border gw2.setColor(Color.black); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight()); wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(), worldmap.getHeight()); float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < Chromosome.getSize()-1; x++){ for (int y = x+1; y < Chromosome.getSize(); y++){ if (dPrimeTable.getLDStats(x,y) == null){ continue; } double xx = ((alignedPositions[y] + alignedPositions[x])/(scalefactor*2)) + wmBorder.getBorderInsets(this).left; double yy = ((alignedPositions[y] - alignedPositions[x] + infoHeight*2)/(scalefactor*2)) + wmBorder.getBorderInsets(this).top; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable.getLDStats(x,y).getColor()); gw2.fill(gp); } } noImage = false; } //draw block display in worldmap Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(wmBorder.getBorderInsets(this).left, wmBorder.getBorderInsets(this).top+WM_BD_GAP, wmInteriorRect.width, WM_BD_HEIGHT); gw2.setColor(Color.black); boolean even = true; for (int i = 0; i < blocks.size(); i++){ int first = ((int[])blocks.elementAt(i))[0]; int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1]; int voffset; if (even){ voffset = 0; }else{ voffset = WM_BD_HEIGHT/2; } gw2.fillRect(wmBorder.getBorderInsets(this).left - (int)prefBoxSize/2 + (int)(alignedPositions[first]/scalefactor), wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP, (int)(prefBoxSize + (alignedPositions[last] - alignedPositions[first])/scalefactor), WM_BD_HEIGHT/2); even = !even; } wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2, visRect.y + visRect.height - worldmap.getHeight(), (worldmap.getWidth()-wmInteriorRect.width)/2, (worldmap.getHeight() -wmInteriorRect.height)/2); g2.drawImage(worldmap,visRect.x, visRect.y + visRect.height - worldmap.getHeight(), this); wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width)/2; wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() + (worldmap.getHeight() - wmInteriorRect.height)/2; //draw the outline of the viewport g2.setColor(Color.black); double hRatio = wmInteriorRect.getWidth()/pref.getWidth(); double vRatio = wmInteriorRect.getHeight()/pref.getHeight(); int hBump = worldmap.getWidth()-wmInteriorRect.width; int vBump = worldmap.getHeight()-wmInteriorRect.height; //bump a few pixels to avoid drawing on the border g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x, (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()), (int)(visRect.width*hRatio), (int)(visRect.height*vRatio)); } //see if the user has right-clicked to popup some marker info if(popupDrawRect != null){ //dumb bug where little datasets popup the box in the wrong place int smallDatasetSlopH = 0; int smallDatasetSlopV = 0; if (pref.getHeight() < visRect.height){ smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2; } if (pref.getWidth() < visRect.width){ smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2; } g2.setColor(Color.white); g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH, popupDrawRect.y+1-smallDatasetSlopV, popupDrawRect.width-1, popupDrawRect.height-1); g2.setColor(Color.black); g2.drawRect(popupDrawRect.x-smallDatasetSlopH, popupDrawRect.y-smallDatasetSlopV, popupDrawRect.width, popupDrawRect.height); g.setFont(popupFont); for (int x = 0; x < displayStrings.size(); x++){ g.drawString((String)displayStrings.elementAt(x),popupDrawRect.x + popupLeftMargin-smallDatasetSlopH, popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV); } } // draw the cached last right-click selection // The purpose of testing for empty string is just to avoid an 2-unit empty white box if (lastSelection != null){ if ((zoomLevel == 0) && (!lastSelection.equals("")) && (!forExport)) { g2.setFont(boxFont); // a bit extra on all side int last_descent = g2.getFontMetrics().getDescent(); int last_box_x = (visRect.x + LAST_SELECTION_LEFT) - 2; int last_box_y = (visRect.y - g2.getFontMetrics().getHeight() + LAST_SELECTION_TOP + last_descent) - 1 ; int last_box_width = g2.getFontMetrics().stringWidth(lastSelection) + 4; int last_box_height = g2.getFontMetrics().getHeight() + 2; g2.setColor(Color.white); g2.fillRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.setColor(Color.black); g2.drawRect(last_box_x, last_box_y, last_box_width, last_box_height); g2.drawString(lastSelection, LAST_SELECTION_LEFT + visRect.x, LAST_SELECTION_TOP + visRect.y); } } //see if we're drawing a worldmap resize rect if (resizeWMRect != null){ g2.setColor(Color.black); g2.drawRect(resizeWMRect.x, resizeWMRect.y, resizeWMRect.width, resizeWMRect.height); } //see if we're drawing a block selector rect if (blockRect != null){ g2.setColor(Color.black); g2.setStroke(dashedThinStroke); g2.drawRect(blockRect.x, blockRect.y, blockRect.width, blockRect.height); } }
if ( db == null ) { log.error( "Could not find dbname for configuration " ); return; } if ( ODMG.initODMG( "", "", db ) ) { log.debug( "Connection succesful!!!" ); } else { log.error( "Error logging into Photovault" ); }
if ( db == null ) { log.error( "Could not find dbname for configuration " ); return; } try { ODMG.initODMG( "", "", db ); log.debug( "Connection succesful!!!" ); } catch (PhotovaultException e ) { log.error( "Error logging into Photovault: " + e.getMessage() ); System.exit( 1 ); }
private JUnitOJBManager() { System.setProperty( "photovault.configfile", "conf/junittest_config.xml" ); log.error( "Initializing OB for JUnit tests" ); createDatabase(); PhotovaultSettings settings = PhotovaultSettings.getSettings(); settings.setConfiguration( "pv_junit" ); PVDatabase db = settings.getDatabase( "pv_junit" ); if ( db == null ) { log.error( "Could not find dbname for configuration " ); return; } if ( ODMG.initODMG( "", "", db ) ) { log.debug( "Connection succesful!!!" ); } else { log.error( "Error logging into Photovault" ); } if ( db.getSchemaVersion() < PVDatabase.CURRENT_SCHEMA_VERSION ) { SchemaUpdateAction updater = new SchemaUpdateAction( db ); updater.upgradeDatabase(); } }
if (isExportLibraries() && parent != null) { parent.registerTagLibrary( namespaceURI, taglib ); }
public void registerTagLibrary(String namespaceURI, TagLibrary taglib) { if (log.isDebugEnabled()) { log.debug("Registering tag library to: " + namespaceURI + " taglib: " + taglib); } taglibs.put(namespaceURI, taglib); if (isExportLibraries() && parent != null) { parent.registerTagLibrary( namespaceURI, taglib ); } }
System.err.println( " update view" );
protected void updateViews( Object source ) { if ( views == null ) { return; } Iterator iter = views.iterator(); while ( iter.hasNext() ) { Object view = iter.next(); if ( view != source ) { updateView( view ); } } }
logger.error("TablePane's parent is "+c.getParent());
public void computeSize(TablePane c) { int height = 0; int width = 0; try { Insets insets = c.getInsets(); SQLTable table = c.getModel(); int cols = table.getColumns().size(); Font font = c.getFont(); if (font == null) { logger.error("Null font in TablePane "+c); return; } FontMetrics metrics = c.getFontMetrics(font); int fontHeight = metrics.getHeight(); height = insets.top + fontHeight + gap + c.getMargin().top + cols*fontHeight + boxLineThickness*2 + c.getMargin().bottom + insets.bottom; width = c.getMinimumSize().width; Iterator columnIt = table.getColumns().iterator(); while (columnIt.hasNext()) { width = Math.max(width, metrics.stringWidth(columnIt.next().toString())); } width += insets.left + c.getMargin().left + boxLineThickness*2 + c.getMargin().right + insets.right; } catch (ArchitectException e) { logger.warn("BasicTablePaneUI.computeSize failed due to", e); width = 100; height = 100; } c.setPreferredSize(new Dimension(width, height)); c.setSize(width, height); // XXX: maybe this should go elsewhere (not sure where) }
dbcs.setName("Target Database"); dbcs.setDisplayName("Target Database");
dbcs.setName("Not Configured"); dbcs.setDisplayName("Not Configured");
private final void setDatabase(SQLDatabase newdb) { if (newdb == null) throw new NullPointerException("db must be non-null"); this.db = newdb; db.setIgnoreReset(true); if (db.getDataSource() == null) { ArchitectDataSource dbcs = new ArchitectDataSource(); dbcs.setName("Target Database"); dbcs.setDisplayName("Target Database"); db.setDataSource(dbcs); } try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } tableNames = new HashSet(); }
doublehet = 0; int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; }
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
if (m1a2==0){ if (m1H==0){ return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } }
if (Chromosome.getMarker(pos1).getMAF() == 0 || Chromosome.getMarker(pos2).getMAF() == 0){ return null;
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
marker1num[m1a1]=1; marker1num[m1a2]=2;
marker1num[Chromosome.getMarker(pos1).getMajor()]=1; marker1num[Chromosome.getMarker(pos1).getMinor()]=2;
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
marker2num[m2a1]=1; marker2num[m2a2]=2;
marker2num[Chromosome.getMarker(pos2).getMajor()]=1; marker2num[Chromosome.getMarker(pos2).getMinor()]=2;
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
} else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++;
} else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++;
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
else if (a1 == 5){
else if (a1 >= 5){
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
} else if (a2 == 5){
} else if (a2 >= 5){
public PairwiseLinkage computeDPrime(int pos1, int pos2){ long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ return null; } } compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ //System.out.println("Marker " + (pos1+1) + " is monomorphic.");//TODO Make this happier return null; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ return null; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[105]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*log10(probHaps[AA]) + known[AB]*log10(probHaps[AB]) + known[BA]*log10(probHaps[BA]) + known[BB]*log10(probHaps[BB]) + (double)unknownDH*log10(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*log10(pA1*pA2) + known[AB]*log10(pA1*pB2) + known[BA]*log10(pB1*pA2) + known[BB]*log10(pB1*pB2) + (double)unknownDH*log10(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ tmp=pA2; pA2=pB2; pB2=tmp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = known[AA]*log10(tmpAA) + known[AB]*log10(tmpAB) + known[BA]*log10(tmpBA) + known[BB]*log10(tmpBB) + (double)unknownDH*log10(tmpAA*tmpBB + tmpAB*tmpBA); } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); }
int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5];
} */
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length);
byte[] thisHap;
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
if (theGeno == 5){ hapstr.append("h");
if (theGeno >= 5){ thisHap[j] = 'h';
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
hapstr.append(convert[j][theGeno]);
if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length);
inputHaploVector.add(thisHap); thisHap = new byte[theBlock.length];
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]);
if (nextGeno >= 5){ thisHap[j] = 'h'; } else { if (nextGeno == a1){ thisHap[j] = '1'; }else if (nextGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length);
inputHaploVector.add(thisHap);
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]);
byte[][] input_haplos = (byte[][])inputHaploVector.toArray(new byte[0][0]);
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
theEM.full_em_breakup(input_haplos, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0);
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))];
byte returnBit = Byte.parseByte(aString.substring(j,j+1)); if (returnBit == 1){ genos[j] = Chromosome.getFilteredMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getFilteredMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getFilteredMarker(theBlock[j]).getMinor(); } }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] theBlock = (int[])blocks.elementAt(k); int[] hetcount = new int[theBlock.length]; int[][] loc = new int[theBlock.length][5]; int[][] convert = new int[theBlock.length][5]; int[][] unconvert = new int[theBlock.length][5]; //int totalHaps = 0; //parse genotypes for unresolved heterozygotes for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hetcount[j]++; } else { loc[j][theGeno]++; } } //totalHaps ++; } for (int j = 0; j < theBlock.length; j++){ int a = 1; for (int m = 1; m <= 4; m++){ if (loc[j][m] > 0){ convert[j][m]=a; unconvert[j][a]=m; loc[j][m]+=(hetcount[j]/2); a++; } else { convert[j][m] = 0; unconvert[j][a] = 8; } } if (unconvert[j][2] == 0) unconvert[j][2] = 8; } StringBuffer hapstr = new StringBuffer(theBlock.length); Vector inputHaploVector = new Vector(); for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); int missing=0; //int dhet=0; for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if(theGeno == 0 || nextGeno == 0) missing++; } if (! (missing > theBlock.length/2 || missing > missingLimit)){ for (int j = 0; j < theBlock.length; j++){ byte theGeno = thisChrom.getFilteredGenotype(theBlock[j]); if (theGeno == 5){ hapstr.append("h"); } else { hapstr.append(convert[j][theGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); for (int j = 0; j < theBlock.length; j++){ byte nextGeno = nextChrom.getFilteredGenotype(theBlock[j]); if (nextGeno == 5){ hapstr.append("h"); }else{ hapstr.append(convert[j][nextGeno]); } } inputHaploVector.add(hapstr.toString()); hapstr = new StringBuffer(theBlock.length); } } String[] input_haplos = (String[])inputHaploVector.toArray(new String[0]); //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } String EMreturn = new String(""); int[] num_haplos_present = new int[1]; Vector haplos_present = new Vector(); Vector haplo_freq = new Vector(); char[][] input_haplos2 = new char[input_haplos.length][]; for (int j = 0; j < input_haplos.length; j++){ input_haplos2[j] = input_haplos[j].toCharArray(); } //kirby patch EM theEM = new EM(); theEM.full_em_breakup(input_haplos2, 4, num_haplos_present, haplos_present, haplo_freq, block_size, 0); for (int j = 0; j < haplos_present.size(); j++){ EMreturn += (String)haplos_present.elementAt(j)+"\t"+(String)haplo_freq.elementAt(j)+"\t"; } StringTokenizer st = new StringTokenizer(EMreturn); int p = 0; Haplotype[] tempArray = new Haplotype[st.countTokens()/2]; while(st.hasMoreTokens()){ String aString = st.nextToken(); int[] genos = new int[aString.length()]; for (int j = 0; j < aString.length(); j++){ genos[j] = unconvert[j][Integer.parseInt(aString.substring(j, j+1))]; } double tempPerc = Double.parseDouble(st.nextToken()); if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, theBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; } } return results; }
genos[q] = 5;
genos[q] = 9;
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
if (genos[q] < 0 || genos[q] > 5){
if (genos[q] < 0 || genos[q] > 9){
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; boolean even = true; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... if (currentLine.length() == 0){ //skip blank lines continue; } even = !even; StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, false, infile.getName())); } if (!even){ //we're missing a line here throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } }
byte a1 = 0;
byte a1 = 0; byte a2 = 0;
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
if (thisAllele == 5){
if (thisAllele >= 5){
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
Math.rint(maf*100.0)/100.0));
Math.rint(maf*100.0)/100.0, a1, a2));
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0));
markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0,a1,a2));
void prepareMarkerInput(File infile, long md, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists Vector names = new Vector(); Vector positions = new Vector(); maxdist = md; negMaxdist = -1 * maxdist; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } if (loc < prevloc){ throw new HaploViewException("Info file out of order:\n"+ name); } prevloc = loc; names.add(name); positions.add(l); } if (lineCount > Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file.")); } if (lineCount < Chromosome.getSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ names.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); } infoKnown = true; } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; if (infoKnown){ markerInfo.add(new SNP((String)names.elementAt(i), Long.parseLong((String)positions.elementAt(i)), Math.rint(maf*100.0)/100.0)); }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), Math.rint(maf*100.0)/100.0)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
viewPane.setCollection( e.getSelected() );
PhotoFolder f = e.getSelected(); if ( f != null ) { viewPane.setCollection( f ); }
protected void createUI() { tabPane = new JTabbedPane(); queryPane = new QueryPane(); treePane = new PhotoFolderTree(); tabPane.addTab( "Query", queryPane ); tabPane.addTab( "Folders", treePane ); viewPane = new TableCollectionView(); viewPane.setCollection( queryPane.getResultCollection() ); // Set listeners to both query and folder tree panes /* If an actionEvent comes from queryPane & the viewed folder is no the query resouts, swich to it (the result folder will be nodified of changes to quert parameters directly */ queryPane.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { if ( viewPane.getCollection() != queryPane.getResultCollection() ) { viewPane.setCollection( queryPane.getResultCollection() ); } } } ); /* If the selected folder is changed in treePane, switch to that immediately */ treePane.addPhotoFolderTreeListener( new PhotoFolderTreeListener() { public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { viewPane.setCollection( e.getSelected() ); } } ); // Create the split pane to display both of these components JSplitPane split = new JSplitPane( JSplitPane.HORIZONTAL_SPLIT, tabPane, viewPane ); Container cp = getContentPane(); cp.setLayout( new BorderLayout() ); cp.add( split, BorderLayout.CENTER ); // Create the menu bar & menus JMenuBar menuBar = new JMenuBar(); setJMenuBar( menuBar ); JMenu fileMenu = new JMenu( "File" ); fileMenu.setMnemonic(KeyEvent.VK_F); menuBar.add( fileMenu ); JMenuItem importItem = new JMenuItem( "Import image...", KeyEvent.VK_I ); importItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { importFile(); } }); fileMenu.add( importItem ); JMenuItem exitItem = new JMenuItem( "Exit", KeyEvent.VK_X ); exitItem.addActionListener( new ActionListener() { public void actionPerformed( ActionEvent e ) { System.exit( 0 ); } }); fileMenu.add( exitItem ); pack(); }
viewPane.setCollection( e.getSelected() );
PhotoFolder f = e.getSelected(); if ( f != null ) { viewPane.setCollection( f ); }
public void photoFolderTreeSelectionChanged( PhotoFolderTreeEvent e ) { viewPane.setCollection( e.getSelected() ); }
throw new JellyException( "This tag does not understand the attribute '" + name + "'" );
throw new JellyException( "This tag does not understand the attribute '" + name + "'", getColumnNumber(), getLineNumber() );
public Script compile() throws Exception { if (tag instanceof CompilableTag) { ((CompilableTag) tag).compile(); } List typeList = new ArrayList(); List methodList = new ArrayList(); List expressionList = new ArrayList(); BeanInfo info = Introspector.getBeanInfo(tag.getClass()); PropertyDescriptor[] descriptors = info.getPropertyDescriptors(); Set attributeSet = new HashSet(); if (descriptors != null) { for (int i = 0, size = descriptors.length; i < size; i++) { PropertyDescriptor descriptor = descriptors[i]; String name = descriptor.getName(); Expression expression = (Expression) attributes.get(name); if (expression != null) { attributeSet.add( name ); Method writeMethod = descriptor.getWriteMethod(); if (writeMethod != null) { Class type = descriptor.getPropertyType(); expressionList.add(expression); methodList.add(writeMethod); typeList.add(type); if (log.isDebugEnabled()) { log.debug( "Adding tag property name: " + name + " type: " + type.getName() + " expression: " + expression); } } } } } // System.err.println( "BeanTagScript::compile() " + this ); // now create the arrays to avoid object allocation & casting when // running the script int size = expressionList.size(); expressions = new Expression[size]; methods = new Method[size]; types = new Class[size]; expressionList.toArray(expressions); methodList.toArray(methods); typeList.toArray(types); // compile body tag.setBody(tag.getBody().compile()); // now lets check for any attributes that are not used for ( Iterator iter = attributes.keySet().iterator(); iter.hasNext(); ) { String name = (String) iter.next(); if ( ! attributeSet.contains( name ) ) { throw new JellyException( "This tag does not understand the attribute '" + name + "'" ); } } return this; }
+ valueTypeName + ". Exception: " + e, e
+ valueTypeName + ". Exception: " + e, getColumnNumber(), getLineNumber()
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, e ); } } runTag(output); }
runTag(output);
try { tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); }
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, e ); } } runTag(output); }
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
public ArchitectFrame() throws ArchitectException { mainInstance = this; architectSession = ArchitectSession.getInstance(); init(); }
public void exit() {
public void exit() { if (getProject().isSaveInProgress()) { JOptionPane.showMessageDialog(this, "Project is saving, cannot exit the Power Architect. Please wait for the save to finish, and then try again.", "Warning", JOptionPane.WARNING_MESSAGE); return; }
public void exit() { if (promptForUnsavedModifications()) { try { closeProject(getProject()); saveSettings(); } catch (ArchitectException e) { logger.error("Couldn't save settings: "+e); } System.exit(0); } }
} finally { project.setSaveInProgress(false);
public boolean saveOrSaveAs(boolean showChooser, boolean separateThread) { if (project.getFile() == null || showChooser) { JFileChooser chooser = new JFileChooser(project.getFile()); chooser.addChoosableFileFilter(ASUtils.ARCHITECT_FILE_FILTER); int response = chooser.showSaveDialog(ArchitectFrame.this); if (response != JFileChooser.APPROVE_OPTION) { return false; } else { File file = chooser.getSelectedFile(); if (!file.getPath().endsWith(".architect")) { file = new File(file.getPath()+".architect"); } if (file.exists()) { response = JOptionPane.showConfirmDialog( ArchitectFrame.this, "The file\n\n"+file.getPath()+"\n\nalready exists. Do you want to overwrite it?", "File Exists", JOptionPane.YES_NO_OPTION); if (response == JOptionPane.NO_OPTION) { return saveOrSaveAs(true, separateThread); } } project.setFile(file); String projName = file.getName().substring(0, file.getName().length()-".architect".length()); project.setName(projName); setTitle(projName); } } final boolean finalSeparateThread = separateThread; final ProgressMonitor pm = new ProgressMonitor (ArchitectFrame.this, "Saving Project", "", 0, 100); Runnable saveTask = new Runnable() { public void run() { try { lastSaveOpSuccessful = false; project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } } }; if (separateThread) { new Thread(saveTask).start(); return true; // this is an optimistic lie } else { saveTask.run(); return lastSaveOpSuccessful; } }
} finally { project.setSaveInProgress(false);
public void run() { try { lastSaveOpSuccessful = false; project.save(finalSeparateThread ? pm : null); lastSaveOpSuccessful = true; } catch (Exception ex) { lastSaveOpSuccessful = false; JOptionPane.showMessageDialog (ArchitectFrame.this, "Can't save project: "+ex.getMessage()); logger.error("Got exception while saving project", ex); } }
fc.setSelectedFile(null);
fc.setSelectedFile(new File(""));
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ fc.setSelectedFile(null); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ fc.setSelectedFile(null); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if (command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
this.session = session;
this.session = s;
public void setSession(Session s) { this.session = session; }
if (parent instanceof ScrolledComposite && widget instanceof Control) { ScrolledComposite scrolledComposite = (ScrolledComposite) parent; scrolledComposite.setContent((Control) widget); }
protected void attachWidgets(Widget parent, Widget widget) { }
log.debug( "entry: indexDirectory " + dir.getAbsolutePath() );
void indexDirectory( File dir, PhotoFolder folder, int startPercent, int endPercent ) { /** Maintain information how many instances for the photos that were previously added to the folder is found */ HashMap photoInstanceCounts = new HashMap(); HashSet foldersNotFound = new HashSet(); if ( folder != null ) { for ( int n = 0; n < folder.getPhotoCount(); n++ ) { photoInstanceCounts.put( folder.getPhoto( n ), new Integer( 0 ) ); } for ( int n = 0; n < folder.getSubfolderCount(); n++ ) { foldersNotFound.add( folder.getSubfolder( n ) ); } } File files[] = dir.listFiles(); // Count the files int fileCount = 0; int subdirCount = 0; for ( int n = 0; n < files.length; n++ ) { if ( files[n].isDirectory() ) { subdirCount++; } else { fileCount++; } } ProgressCalculator c = new ProgressCalculator( startPercent, endPercent, fileCount, subdirCount ); int nFile = 0; int nDir = 0; for ( int n = 0; n < files.length; n++ ) { File f = files[n]; if ( f.isDirectory() ) { // Create the matching folder PhotoFolder subfolder = null; if ( folder != null ) { subfolder = findSubfolderByName( folder, f.getName() ); if ( subfolder == null ) { subfolder = PhotoFolder.create( f.getName(), folder ); newFolderCount++; } else { foldersNotFound.remove( subfolder ); } } /* Calclate the start & end percentages to use when indexing this directory. Formula goes so that we estimate that to index current dirctory completely we must index files in subDirCount+1 directories (all subdirs + current directory). So we divide endPercent - startPercent into this many steps) */ int subdirStart = c.getProgress(); nDir++; c.setProcessedSubdirs( nDir ); int subdirEnd = c.getProgress(); indexDirectory( f, subfolder, subdirStart, subdirEnd ); percentComplete = c.getProgress(); } else { if ( f.canRead() ) { currentEvent = new ExtVolIndexerEvent( this ); PhotoInfo p = indexFile( f ); if ( p != null ) { if ( photoInstanceCounts.containsKey( p ) ) { // The photo is already in this folder int refCount = ((Integer)photoInstanceCounts.get( p ) ).intValue(); photoInstanceCounts.remove( p ); photoInstanceCounts.put( p, new Integer( refCount+1 )); } else { // The photo is not yet in this folder folder.addPhoto( p ); photoInstanceCounts.put( p, new Integer( 1 )); } } nFile++; c.setProcessedFiles( nFile ); percentComplete = c.getProgress(); notifyListeners( currentEvent ); } } } /* Check if some of the photos that were in folder before were not found in this directory */ Iterator iter = photoInstanceCounts.keySet().iterator(); while ( iter.hasNext() ) { PhotoInfo p = (PhotoInfo ) iter.next(); int refCount = ((Integer)photoInstanceCounts.get( p )).intValue(); if ( refCount == 0 ) { folder.removePhoto( p ); } } // Delete folders that were not anymore found iter = foldersNotFound.iterator(); while ( iter.hasNext() ) { PhotoFolder subfolder = (PhotoFolder)iter.next(); subfolder.delete(); } }
log.debug( "exit: indexFile " + f.getAbsolutePath() );
PhotoInfo indexFile( File f ) { indexedFileCount++; // Check if the instance already exists n database ImageInstance oldInstance = null; try { oldInstance = ImageInstance.retrieve( volume, volume.mapFileToVolumeRelativeName(f ) ); } catch ( PhotoNotFoundException e ) { // No action, there just were no matching instances in database } if ( oldInstance != null ) { // There is an existing instance, check whether the data matches if ( oldInstance.doConsistencyCheck() ) { PhotoInfo photo = null; try { photo = PhotoInfo.retrievePhotoInfo(oldInstance.getPhotoUid()); } catch (PhotoNotFoundException ex) { ex.printStackTrace(); } return photo; } else { PhotoInfo photo = null; try { photo = PhotoInfo.retrievePhotoInfo(oldInstance.getPhotoUid()); } catch (PhotoNotFoundException ex) { ex.printStackTrace(); } Vector instances = photo.getInstances(); int instNum = instances.indexOf( oldInstance ); if ( instNum >= 0 ) { photo.removeInstance( instNum ); } } } // Check whether this is really an image file ODMGXAWrapper txw = new ODMGXAWrapper(); ImageInstance instance = ImageInstance.create( volume, f ); if ( instance == null ) { currentEvent.setResult( ExtVolIndexerEvent.RESULT_NOT_IMAGE ); /* ImageInstance already aborts transaction if reading image file was unsuccessfull. */ return null; } byte[] hash = instance.getHash(); // Check whether there is already an image instance with the same hash PhotoInfo matchingPhotos[] = PhotoInfo.retrieveByOrigHash( hash ); PhotoInfo photo = null; if ( matchingPhotos != null && matchingPhotos.length > 0 ) { // If yes then get the PhotoInfo and add this file as an instance with // the same type as the one with same hash. If only PhotoInfo with no // instances add as original for that photo = matchingPhotos[0]; photo.addInstance( instance ); currentEvent.setResult( ExtVolIndexerEvent.RESULT_NEW_INSTANCE ); newInstanceCount++; } else { photo = PhotoInfo.create(); photo.addInstance( instance ); photo.updateFromOriginalFile(); // Create a thumbnail for this photo photo.getThumbnail(); currentEvent.setResult( ExtVolIndexerEvent.RESULT_NEW_PHOTO ); newInstanceCount++; newPhotoCount++; } currentEvent.setPhoto( photo ); txw.commit(); return photo; }
public void doTag(XMLOutput output) throws Exception { Class throwableClass = getThrowableClass();
public void doTag(XMLOutput output) throws JellyTagException { Class throwableClass = null;
public void doTag(XMLOutput output) throws Exception { Class throwableClass = getThrowableClass(); try { invokeBody(output); } catch (Throwable t) { if (t instanceof JellyException) { // unwrap Jelly exceptions which wrap other exceptions JellyException je = (JellyException) t; if (je.getCause() != null) { t = je.getCause(); } } if (var != null) { context.setVariable(var, t); } if (throwableClass != null && !throwableClass.isAssignableFrom(t.getClass())) { fail("Unexpected exception: " + t); } else { return; } } fail("No exception was thrown."); }
throwableClass = getThrowableClass();
public void doTag(XMLOutput output) throws Exception { Class throwableClass = getThrowableClass(); try { invokeBody(output); } catch (Throwable t) { if (t instanceof JellyException) { // unwrap Jelly exceptions which wrap other exceptions JellyException je = (JellyException) t; if (je.getCause() != null) { t = je.getCause(); } } if (var != null) { context.setVariable(var, t); } if (throwableClass != null && !throwableClass.isAssignableFrom(t.getClass())) { fail("Unexpected exception: " + t); } else { return; } } fail("No exception was thrown."); }
if (userCancel) return;
if (userCancel) { remove(col.getParentTable()); return; }
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) return; } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) break; } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
if (userCancel) break;
if (userCancel) { remove(col.getParentTable()); break; }
private void doColumnProfile(List<SQLColumn> columns, Connection conn) throws SQLException, ArchitectException { Statement stmt = null; ResultSet rs = null; String lastSQL = null; try { if ( columns.size() == 0 ) return; SQLColumn col1 = columns.get(0); DDLGenerator ddlg = null; try { ddlg = (DDLGenerator) DDLUtils.createDDLGenerator( col1.getParentTable().getParentDatabase().getDataSource()); } catch (InstantiationException e1) { throw new ArchitectException("problem running Profile Manager", e1); } catch ( IllegalAccessException e1 ) { throw new ArchitectException("problem running Profile Manager", e1); } stmt = conn.createStatement(); stmt.setEscapeProcessing(false); for (SQLColumn col : columns ) { synchronized (monitorableMutex) { if (userCancel) return; } ProfileFunctionDescriptor pfd = ddlg.getProfileFunctionMap().get(col.getSourceDataTypeName()); ColumnProfileResult colResult = null; long profileStartTime = System.currentTimeMillis(); if ( pfd == null ) { System.out.println(col.getName()+ " Unknown DataType:(" + col.getSourceDataTypeName() + ")."); pfd = discoverProfileFunctionDescriptor(col,ddlg,conn); } try { colResult = execProfileFunction(pfd,col,ddlg,conn); } catch ( Exception ex ) { colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(profileStartTime); colResult.setError(true); colResult.setException(ex); colResult.setCreateEndTime(System.currentTimeMillis()); logger.error("Error in Column Profiling: "+lastSQL, ex); } finally { putResult(colResult); } synchronized (monitorableMutex) { progress++; if (userCancel) break; } } // XXX: add where filter later } finally { try { if (rs != null) rs.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } try { if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up statement", ex); } } }
int i = 0;
final int i = 0;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(",\n COUNT(DISTINCT \"");
sql.append(",\n COUNT(DISTINCT "); sql.append(databaseIdentifierQuoteString);
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append("\") AS DISTINCTCOUNT_"+i);
sql.append(databaseIdentifierQuoteString); sql.append(") AS DISTINCTCOUNT_"+i); tryCount++;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(",\n MIN(\"");
sql.append(",\n MIN("); sql.append(databaseIdentifierQuoteString);
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append("\") AS MINVALUE_"+i);
sql.append(databaseIdentifierQuoteString); sql.append(") AS MINVALUE_"+i); tryCount++;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(",\n MAX(\"");
sql.append(",\n MAX("); sql.append(databaseIdentifierQuoteString);
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append("\") AS MAXVALUE_"+i);
sql.append(databaseIdentifierQuoteString); sql.append(") AS MAXVALUE_"+i); tryCount++;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\""));
sql.append(ddlg.getAverageSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+ databaseIdentifierQuoteString));
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\""));
sql.append(ddlg.getStringLengthSQLFunctionName(databaseIdentifierQuoteString+ col.getName()+databaseIdentifierQuoteString));
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1"));
sql.append(ddlg.caseWhenNull( databaseIdentifierQuoteString+ col.getName()+ databaseIdentifierQuoteString, "1"));
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName));
if ( tryCount > 0 ) { sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); }
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName));
else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName );
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); }
rs.close(); rs = null;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
rs.close(); rs = null;
private ColumnProfileResult execProfileFunction(ProfileFunctionDescriptor pfd, SQLColumn col, DDLGenerator ddlg, Connection conn) throws SQLException { long createStartTime = System.currentTimeMillis(); int i = 0; StringBuffer sql = new StringBuffer(); Statement stmt = null; ResultSet rs = null; String lastSQL = null; String columnName = null; String databaseIdentifierQuoteString = null; try { databaseIdentifierQuoteString = conn.getMetaData().getIdentifierQuoteString(); sql.append("SELECT 1"); if (findingDistinctCount && pfd.isCountDist() ) { sql.append(",\n COUNT(DISTINCT \""); sql.append(col.getName()); sql.append("\") AS DISTINCTCOUNT_"+i); } if (findingMin && pfd.isMinValue() ) { sql.append(",\n MIN(\""); sql.append(col.getName()); sql.append("\") AS MINVALUE_"+i); } if (findingMax && pfd.isMaxValue() ) { sql.append(",\n MAX(\""); sql.append(col.getName()); sql.append("\") AS MAXVALUE_"+i); } if (findingAvg && pfd.isAvgValue() ) { sql.append(",\n "); sql.append(ddlg.getAverageSQLFunctionName("\""+col.getName()+"\"")); sql.append(" AS AVGVALUE_"+i); } if (findingMinLength && pfd.isMinLength() ) { sql.append(",\n MIN("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MINLENGTH_"+i); } if (findingMaxLength && pfd.isMaxLength() ) { sql.append(",\n MAX("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS MAXLENGTH_"+i); } if (findingAvgLength && pfd.isAvgLength() ) { sql.append(",\n AVG("); sql.append(ddlg.getStringLengthSQLFunctionName("\""+col.getName()+"\"")); sql.append(") AS AVGLENGTH_"+i); } if ( findingNullCount && pfd.isSumDecode() ) { sql.append(",\n SUM("); sql.append(ddlg.caseWhen("\""+col.getName()+"\"", "NULL", "1")); sql.append(") AS NULLCOUNT_"+i); } SQLTable table = col.getParentTable(); sql.append("\n FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); stmt = conn.createStatement(); stmt.setEscapeProcessing(false); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); ColumnProfileResult colResult = new ColumnProfileResult(col); colResult.setCreateStartTime(createStartTime); if ( rs.next() ) { if (findingDistinctCount && pfd.isCountDist() ) { columnName = "DISTINCTCOUNT_"+i; colResult.setDistinctValueCount(rs.getInt(columnName)); } if (findingMin && pfd.isMinValue() ) { columnName = "MINVALUE_"+i; colResult.setMinValue(rs.getObject(columnName)); } if (findingMax && pfd.isMaxValue() ) { columnName = "MAXVALUE_"+i; colResult.setMaxValue(rs.getObject(columnName)); } if (findingAvg && pfd.isAvgValue() ) { columnName = "AVGVALUE_"+i; colResult.setAvgValue(rs.getObject(columnName)); } if (findingMinLength && pfd.isMinLength() ) { columnName = "MINLENGTH_"+i; colResult.setMinLength(rs.getInt(columnName)); } if (findingMaxLength && pfd.isMaxLength() ) { columnName = "MAXLENGTH_"+i; colResult.setMaxLength(rs.getInt(columnName)); } if (findingAvgLength && pfd.isAvgLength() ) { columnName = "AVGLENGTH_"+i; colResult.setAvgLength(rs.getDouble(columnName)); } if ( findingNullCount && pfd.isSumDecode() ) { columnName = "NULLCOUNT_"+i; colResult.setNullCount(rs.getInt(columnName)); } } else { throw new IllegalStateException("Query executed, but returns no rows:\n" + lastSQL + "\nColumn Name: " + columnName ); } rs.close(); rs = null; if (findingTopTen && pfd.isCountDist() ) { sql = new StringBuffer(); sql.append("SELECT ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" AS MYVALUE, COUNT(*) AS COUNT1 FROM "); sql.append(DDLUtils.toQualifiedName(table.getCatalogName(), table.getSchemaName(), table.getName(), databaseIdentifierQuoteString, databaseIdentifierQuoteString)); sql.append(" GROUP BY ").append(databaseIdentifierQuoteString); sql.append(col.getName()).append(databaseIdentifierQuoteString); sql.append(" ORDER BY COUNT1 DESC"); lastSQL = sql.toString(); rs = stmt.executeQuery(lastSQL); for ( int n=0; rs.next() && n < topNCount; n++ ) { colResult.addValueCount(rs.getObject("MYVALUE"), rs.getInt("COUNT1")); } } colResult.setCreateEndTime(System.currentTimeMillis()); return colResult; } finally { try { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } catch (SQLException ex) { logger.error("Couldn't clean up result set", ex); } } }
public void remove(SQLObject sqo){
public void remove(SQLObject sqo) throws ArchitectException{
public void remove(SQLObject sqo){ results.remove(sqo); }
if ( sqo instanceof SQLTable ) { for ( SQLColumn col: ((SQLTable)sqo).getColumns()) { results.remove(col); } } else if ( sqo instanceof SQLColumn ) { SQLTable table = ((SQLColumn)sqo).getParentTable(); boolean allColumnDeleted = true; for ( SQLColumn col: table.getColumns()) { if ( getResult(col) != null ) { allColumnDeleted = false; break; } } if ( allColumnDeleted ) results.remove(table); }
public void remove(SQLObject sqo){ results.remove(sqo); }
userCancel = true;
userCancel = cancelled;
public void setCancelled(boolean cancelled) { synchronized (monitorableMutex) { userCancel = true; } }
InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri);
source = context.getResource(uri); } if (source instanceof URL) { return getSAXReader().read((URL) source);
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
} else if (source instanceof URL) { return getSAXReader().read((URL) source);
protected Document parse(Object source) throws Exception { // #### we should allow parsing to output XML events to // the output if no var is specified if (source instanceof String) { String uri = (String) source; InputStream in = context.getResourceAsStream(uri); return getSAXReader().read(in, uri); } else if (source instanceof Reader) { return getSAXReader().read((Reader) source); } else if (source instanceof InputStream) { return getSAXReader().read((InputStream) source); } else if (source instanceof URL) { return getSAXReader().read((URL) source); } else { throw new IllegalArgumentException( "Invalid source argument. Must be a String, Reader, InputStream or URL." + " Was type; " + source.getClass().getName() + " with value: " + source); } }
double overThresh = crossVal*100 - mycolorThresh; float lineRed, lineGreen, lineBlue; if(overThresh < (50-mycolorThresh)/2){ lineRed=0.0f; lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue(); lineGreen=0.9f-lineBlue;
Color lineColor; if (crossVal > 0.25){ lineColor = new Color(255,0,0); }else if (crossVal > 0.20){ lineColor = new Color(255,153,0); }else if (crossVal > 0.15){ lineColor = new Color(0,204,0); }else if (crossVal > 0.5){ lineColor = new Color(0,51,204);
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){ Graphics2D g = (Graphics2D) gr; final BasicStroke stroke = new BasicStroke(1.0f); final BasicStroke wideStroke = new BasicStroke(2.0f); final int verticalOffset = 43; final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12); final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12); final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7); final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12); FontMetrics regfm = g.getFontMetrics(regFont); FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont); FontMetrics boldfm = g.getFontMetrics(boldFont); String theHap = new String(); int x = 10; int y = verticalOffset; int totalWidth = 0; NumberFormat nf = NumberFormat.getInstance(); NumberFormat nfMulti = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); nfMulti.setMinimumFractionDigits(2); nfMulti.setMaximumFractionDigits(2); int[][]lookupPos = new int[hapsInBlocks.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[hapsInBlocks[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder()); } } Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr); int windowX = (int)theDimension.getWidth(); int windowY = (int)theDimension.getHeight(); g.setColor(Color.white); g.fillRect(0,0,windowX,windowY); g.setColor(Color.black); for (int i = 0; i < hapsInBlocks.length; i++){ int[] markerNums = hapsInBlocks[i][0].getMarkers(); boolean[] tags = hapsInBlocks[i][0].getTags(); int headerX = x; for (int z = 0; z < markerNums.length; z++){ //put tag snps in red if (tags[z]) { g.setColor(Color.red); } //write labels with more than one digit vertically if (markerNums[z]+1 < 10){ g.setFont(regFont); g.drawString(String.valueOf(markerNums[z]+1), headerX, 18); headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1))); }else { int ones = (markerNums[z]+1)%10; int tens = (((markerNums[z]+1)-ones)%100)/10; g.setFont(regFont); g.drawString(String.valueOf(ones), headerX, 18); g.setFont(smallFont); g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent()); headerX += (regfm.stringWidth(String.valueOf(ones))); } g.setColor(Color.black); } for (int j = 0; j < hapsInBlocks[i].length; j++){ int curHapNum = lookupPos[i][j]; theHap = new String(); String thePercentage = new String(); int[] theGeno = hapsInBlocks[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ //if we don't know what one of the alleles for a marker is, use "x" if (theGeno[k] == 8){ theHap += "x"; }else{ theHap += theGeno[k]; } } //draw the haplotype in mono font g.setFont(regFont); g.drawString(theHap, x, y); //draw the percentage value in non mono font thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")"; g.setFont(nonMonoFont); g.drawString(thePercentage, x+regfm.stringWidth(theHap), y); totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage); if (i < hapsInBlocks.length - 1){ //draw crossovers for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){ double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount); if (myuseThickness){ //draw thin and thick lines if (crossVal*100 > mycrossThinThresh){ if (crossVal*100 > mycrossThickThresh){ g.setStroke(wideStroke); }else{ g.setStroke(stroke); } //this arcane formula draws lines neatly from one hap to another g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); } }else{ //draw colored lines if(crossVal*100 > mycolorThresh){ g.setStroke(stroke); double overThresh = crossVal*100 - mycolorThresh; float lineRed, lineGreen, lineBlue; if(overThresh < (50-mycolorThresh)/2){ //cold colors lineRed=0.0f; lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue(); lineGreen=0.9f-lineBlue; }else{ //hot colors lineBlue=0.0f; lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue(); lineGreen=0.9f-lineRed; } Color lineColor = new Color(lineRed, lineGreen, lineBlue); g.setColor(lineColor); g.setStroke(new BasicStroke(1.5f)); g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); g.setColor(Color.black); g.setStroke(stroke); } } } } y += (regfm.getHeight()+5); } //add the multilocus d prime if appropriate if (i < hapsInBlocks.length - 1){ int multiX = x +totalWidth+3; g.setStroke(wideStroke); g.setFont(boldFont); g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3); g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3); g.setStroke(stroke); } x += (totalWidth + 40); y = verticalOffset; } }
lineBlue=0.0f; lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue(); lineGreen=0.9f-lineRed;
lineColor = new Color(0,0,153);
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){ Graphics2D g = (Graphics2D) gr; final BasicStroke stroke = new BasicStroke(1.0f); final BasicStroke wideStroke = new BasicStroke(2.0f); final int verticalOffset = 43; final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12); final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12); final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7); final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12); FontMetrics regfm = g.getFontMetrics(regFont); FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont); FontMetrics boldfm = g.getFontMetrics(boldFont); String theHap = new String(); int x = 10; int y = verticalOffset; int totalWidth = 0; NumberFormat nf = NumberFormat.getInstance(); NumberFormat nfMulti = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); nfMulti.setMinimumFractionDigits(2); nfMulti.setMaximumFractionDigits(2); int[][]lookupPos = new int[hapsInBlocks.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[hapsInBlocks[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder()); } } Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr); int windowX = (int)theDimension.getWidth(); int windowY = (int)theDimension.getHeight(); g.setColor(Color.white); g.fillRect(0,0,windowX,windowY); g.setColor(Color.black); for (int i = 0; i < hapsInBlocks.length; i++){ int[] markerNums = hapsInBlocks[i][0].getMarkers(); boolean[] tags = hapsInBlocks[i][0].getTags(); int headerX = x; for (int z = 0; z < markerNums.length; z++){ //put tag snps in red if (tags[z]) { g.setColor(Color.red); } //write labels with more than one digit vertically if (markerNums[z]+1 < 10){ g.setFont(regFont); g.drawString(String.valueOf(markerNums[z]+1), headerX, 18); headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1))); }else { int ones = (markerNums[z]+1)%10; int tens = (((markerNums[z]+1)-ones)%100)/10; g.setFont(regFont); g.drawString(String.valueOf(ones), headerX, 18); g.setFont(smallFont); g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent()); headerX += (regfm.stringWidth(String.valueOf(ones))); } g.setColor(Color.black); } for (int j = 0; j < hapsInBlocks[i].length; j++){ int curHapNum = lookupPos[i][j]; theHap = new String(); String thePercentage = new String(); int[] theGeno = hapsInBlocks[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ //if we don't know what one of the alleles for a marker is, use "x" if (theGeno[k] == 8){ theHap += "x"; }else{ theHap += theGeno[k]; } } //draw the haplotype in mono font g.setFont(regFont); g.drawString(theHap, x, y); //draw the percentage value in non mono font thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")"; g.setFont(nonMonoFont); g.drawString(thePercentage, x+regfm.stringWidth(theHap), y); totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage); if (i < hapsInBlocks.length - 1){ //draw crossovers for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){ double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount); if (myuseThickness){ //draw thin and thick lines if (crossVal*100 > mycrossThinThresh){ if (crossVal*100 > mycrossThickThresh){ g.setStroke(wideStroke); }else{ g.setStroke(stroke); } //this arcane formula draws lines neatly from one hap to another g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); } }else{ //draw colored lines if(crossVal*100 > mycolorThresh){ g.setStroke(stroke); double overThresh = crossVal*100 - mycolorThresh; float lineRed, lineGreen, lineBlue; if(overThresh < (50-mycolorThresh)/2){ //cold colors lineRed=0.0f; lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue(); lineGreen=0.9f-lineBlue; }else{ //hot colors lineBlue=0.0f; lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue(); lineGreen=0.9f-lineRed; } Color lineColor = new Color(lineRed, lineGreen, lineBlue); g.setColor(lineColor); g.setStroke(new BasicStroke(1.5f)); g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); g.setColor(Color.black); g.setStroke(stroke); } } } } y += (regfm.getHeight()+5); } //add the multilocus d prime if appropriate if (i < hapsInBlocks.length - 1){ int multiX = x +totalWidth+3; g.setStroke(wideStroke); g.setFont(boldFont); g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3); g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3); g.setStroke(stroke); } x += (totalWidth + 40); y = verticalOffset; } }
Color lineColor = new Color(lineRed, lineGreen, lineBlue);
public void haploDraw(Graphics gr, boolean myuseThickness, int mycolorThresh, int mycrossThinThresh, int mycrossThickThresh, double[] gapDPrime, Haplotype[][] hapsInBlocks){ Graphics2D g = (Graphics2D) gr; final BasicStroke stroke = new BasicStroke(1.0f); final BasicStroke wideStroke = new BasicStroke(2.0f); final int verticalOffset = 43; final Font nonMonoFont = new Font("Lucida Bright", Font.PLAIN, 12); final Font regFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 12); final Font smallFont = new Font("Lucida Sans Typewriter", Font.PLAIN, 7); final Font boldFont = new Font("Lucida Bright", Font.BOLD, 12); FontMetrics regfm = g.getFontMetrics(regFont); FontMetrics nonMonofm = g.getFontMetrics(nonMonoFont); FontMetrics boldfm = g.getFontMetrics(boldFont); String theHap = new String(); int x = 10; int y = verticalOffset; int totalWidth = 0; NumberFormat nf = NumberFormat.getInstance(); NumberFormat nfMulti = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); nfMulti.setMinimumFractionDigits(2); nfMulti.setMaximumFractionDigits(2); int[][]lookupPos = new int[hapsInBlocks.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[hapsInBlocks[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][hapsInBlocks[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + hapsInBlocks[p][q].getListOrder()); } } Dimension theDimension = haploGetPreferredSize(hapsInBlocks, gr); int windowX = (int)theDimension.getWidth(); int windowY = (int)theDimension.getHeight(); g.setColor(Color.white); g.fillRect(0,0,windowX,windowY); g.setColor(Color.black); for (int i = 0; i < hapsInBlocks.length; i++){ int[] markerNums = hapsInBlocks[i][0].getMarkers(); boolean[] tags = hapsInBlocks[i][0].getTags(); int headerX = x; for (int z = 0; z < markerNums.length; z++){ //put tag snps in red if (tags[z]) { g.setColor(Color.red); } //write labels with more than one digit vertically if (markerNums[z]+1 < 10){ g.setFont(regFont); g.drawString(String.valueOf(markerNums[z]+1), headerX, 18); headerX += (regfm.stringWidth(String.valueOf(markerNums[z]+1))); }else { int ones = (markerNums[z]+1)%10; int tens = (((markerNums[z]+1)-ones)%100)/10; g.setFont(regFont); g.drawString(String.valueOf(ones), headerX, 18); g.setFont(smallFont); g.drawString(String.valueOf(tens), headerX-2, 20-regfm.getAscent()); headerX += (regfm.stringWidth(String.valueOf(ones))); } g.setColor(Color.black); } for (int j = 0; j < hapsInBlocks[i].length; j++){ int curHapNum = lookupPos[i][j]; theHap = new String(); String thePercentage = new String(); int[] theGeno = hapsInBlocks[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ //if we don't know what one of the alleles for a marker is, use "x" if (theGeno[k] == 8){ theHap += "x"; }else{ theHap += theGeno[k]; } } //draw the haplotype in mono font g.setFont(regFont); g.drawString(theHap, x, y); //draw the percentage value in non mono font thePercentage = " (" + nf.format(hapsInBlocks[i][curHapNum].getPercentage()) + ")"; g.setFont(nonMonoFont); g.drawString(thePercentage, x+regfm.stringWidth(theHap), y); totalWidth = regfm.stringWidth(theHap) + nonMonofm.stringWidth(thePercentage); if (i < hapsInBlocks.length - 1){ //draw crossovers for (int crossCount = 0; crossCount < hapsInBlocks[i+1].length; crossCount++){ double crossVal = hapsInBlocks[i][curHapNum].getCrossover(crossCount); if (myuseThickness){ //draw thin and thick lines if (crossVal*100 > mycrossThinThresh){ if (crossVal*100 > mycrossThickThresh){ g.setStroke(wideStroke); }else{ g.setStroke(stroke); } //this arcane formula draws lines neatly from one hap to another g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); } }else{ //draw colored lines if(crossVal*100 > mycolorThresh){ g.setStroke(stroke); double overThresh = crossVal*100 - mycolorThresh; float lineRed, lineGreen, lineBlue; if(overThresh < (50-mycolorThresh)/2){ //cold colors lineRed=0.0f; lineBlue=new Double(0.9-((overThresh/((50-mycolorThresh)/2))*0.9)).floatValue(); lineGreen=0.9f-lineBlue; }else{ //hot colors lineBlue=0.0f; lineRed=new Double(((overThresh-(50-mycolorThresh)/2)/((50-mycolorThresh)/2))*0.9).floatValue(); lineGreen=0.9f-lineRed; } Color lineColor = new Color(lineRed, lineGreen, lineBlue); g.setColor(lineColor); g.setStroke(new BasicStroke(1.5f)); g.draw(new Line2D.Double((x+totalWidth+3), (y-regfm.getAscent()/2), (x+totalWidth+37), (verticalOffset-regfm.getAscent()/2+((regfm.getHeight()+5)*hapsInBlocks[i+1][crossCount].getListOrder())))); g.setColor(Color.black); g.setStroke(stroke); } } } } y += (regfm.getHeight()+5); } //add the multilocus d prime if appropriate if (i < hapsInBlocks.length - 1){ int multiX = x +totalWidth+3; g.setStroke(wideStroke); g.setFont(boldFont); g.drawRect(multiX, windowY-boldfm.getAscent()-4, boldfm.stringWidth("8.88")+3, boldfm.getAscent()+3); g.drawString(String.valueOf(nfMulti.format(gapDPrime[i])), multiX+2, windowY - 3); g.setStroke(stroke); } x += (totalWidth + 40); y = verticalOffset; } }
"Edited user "+user.getName()+"/"+user.getPassword());
"Edited user "+user.getName());
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { AccessController.canAccess(context.getUser(), ACL_EDIT_USERS); User user = buildUser(actionForm); UserManager.getInstance().updateUser(user); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Edited user "+user.getName()+"/"+user.getPassword()); return mapping.findForward(Forwards.SUCCESS); }
ProfileResult old = results.remove(sqo);
results.remove(sqo);
public void remove(SQLObject sqo) throws ArchitectException{ ProfileResult old = results.remove(sqo); if ( sqo instanceof SQLTable ) { for ( SQLColumn col: ((SQLTable)sqo).getColumns()) { results.remove(col); } } else if ( sqo instanceof SQLColumn ) { SQLTable table = ((SQLColumn)sqo).getParentTable(); boolean allColumnDeleted = true; for ( SQLColumn col: table.getColumns()) { if ( getResult(col) != null ) { allColumnDeleted = false; break; } } if ( allColumnDeleted ){ results.remove(table); } } fireProfileRemovedEvent(new ProfileChangeEvent(this, null)); }
newOutput.setContentHandler( new SafeContentHandler(newOutput.getContentHandler()) );
protected void writeBody(Writer writer) throws SAXException, JellyTagException { XMLOutput newOutput = createXMLOutput(writer); try { newOutput.startDocument(); invokeBody(newOutput); newOutput.endDocument(); } finally { try { newOutput.close(); } catch (IOException e) {} } }
Map paramValues = new HashMap(); if(appForm.getJndiFactory() != null) paramValues.put(ApplicationConfig.JNDI_FACTORY, appForm.getJndiFactory()); if(appForm.getJndiURL() != null) paramValues.put(ApplicationConfig.JNDI_URL, appForm.getJndiURL()); appConfigData.setParamValues(paramValues);
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception{ ApplicationForm appForm = (ApplicationForm)actionForm; /* create ApplicationConfigData from this form */ ApplicationConfigData appConfigData = new ApplicationConfigData(); CoreUtils.copyProperties(appConfigData, appForm); ConfigurationService service = ServiceFactory.getConfigurationService(); service.addApplication(Utils.getServiceContext(context), appConfigData); return mapping.findForward(Forwards.SUCCESS); }
return flavors[i];
best = flavors[i]; } else { logger.debug("NO!");
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); return flavors[i]; } } logger.debug("NO!"); return null; }
logger.debug("NO!"); return null;
return best;
public DataFlavor bestImportFlavor(JComponent c, DataFlavor[] flavors) { logger.debug("PlayPenTransferHandler: can I import "+Arrays.asList(flavors)); for (int i = 0; i < flavors.length; i++) { String cls = flavors[i].getDefaultRepresentationClassAsString(); logger.debug("representation class = "+cls); logger.debug("mime type = "+flavors[i].getMimeType()); logger.debug("type = "+flavors[i].getPrimaryType()); logger.debug("subtype = "+flavors[i].getSubType()); logger.debug("class = "+flavors[i].getParameter("class")); logger.debug("isSerializedObject = "+flavors[i].isFlavorSerializedObjectType()); logger.debug("isInputStream = "+flavors[i].isRepresentationClassInputStream()); logger.debug("isRemoteObject = "+flavors[i].isFlavorRemoteObjectType()); logger.debug("isLocalObject = "+flavors[i].getMimeType().equals(DataFlavor.javaJVMLocalObjectMimeType)); if (flavors[i].equals(SQLObjectTransferable.flavor) || flavors[i].equals(SQLObjectListTransferable.flavor)) { logger.debug("YES"); return flavors[i]; } } logger.debug("NO!"); return null; }
logger.debug("MyJTreeTransferHandler.importData: got object of type "+someData.getClass().getName());
logger.debug("MyJTreeTransferHandler.importData: got object of type "+someData.getClass().getName()+" @"+someData.hashCode());
public void drop(DropTargetDropEvent dtde) { Transferable t = dtde.getTransferable(); PlayPen c = (PlayPen) dtde.getDropTargetContext().getComponent(); DataFlavor importFlavor = bestImportFlavor(c, t.getTransferDataFlavors()); if (importFlavor == null) { dtde.rejectDrop(); } else { try { Object someData = t.getTransferData(importFlavor); logger.debug("MyJTreeTransferHandler.importData: got object of type "+someData.getClass().getName()); if (someData instanceof SQLTable) { dtde.acceptDrop(DnDConstants.ACTION_COPY); c.addTable((SQLTable) someData, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLSchema) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLSchema sourceSchema = (SQLSchema) someData; c.addSchema(sourceSchema, dtde.getLocation()); dtde.dropComplete(true); return; } else if (someData instanceof SQLCatalog) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLCatalog sourceCatalog = (SQLCatalog) someData; Iterator cit = sourceCatalog.getChildren().iterator(); if (sourceCatalog.isSchemaContainer()) { while (cit.hasNext()) { SQLSchema sourceSchema = (SQLSchema) cit.next(); c.addSchema(sourceSchema, dtde.getLocation()); } } else { while (cit.hasNext()) { SQLTable sourceTable = (SQLTable) cit.next(); c.addTable(sourceTable, dtde.getLocation()); } } dtde.dropComplete(true); return; } else if (someData instanceof SQLColumn) { dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLColumn column = (SQLColumn) someData; JLabel colName = new JLabel(column.getColumnName()); colName.setSize(colName.getPreferredSize()); c.add(colName, dtde.getLocation()); logger.debug("Added "+column.getColumnName()+" to playpen (temporary, only for testing)"); colName.revalidate(); dtde.dropComplete(true); return; } else if (someData instanceof SQLObject[]) { // needs work (should use addSchema()) dtde.acceptDrop(DnDConstants.ACTION_COPY); SQLObject[] objects = (SQLObject[]) someData; for (int i = 0; i < objects.length; i++) { if (objects[i] instanceof SQLTable) { c.addTable((SQLTable) objects[i], dtde.getLocation()); } else if (objects[i] instanceof SQLSchema) { c.addSchema((SQLSchema) objects[i], dtde.getLocation()); } else { logger.warn("Unsupported object in multi-item drop: " +objects[i]); } } dtde.dropComplete(true); return; } else { dtde.rejectDrop(); } } catch (UnsupportedFlavorException ufe) { ufe.printStackTrace(); dtde.rejectDrop(); } catch (IOException ioe) { ioe.printStackTrace(); dtde.rejectDrop(); } catch (InvalidDnDOperationException ex) { ex.printStackTrace(); dtde.rejectDrop(); } catch (ArchitectException ex) { ex.printStackTrace(); dtde.rejectDrop(); } } }
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); }
public PlayPen() {
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); tableNames = new HashMap(); addContainerListener(this); setupTablePanePopup(); }
tableNames = new HashMap();
public PlayPen(SQLDatabase db) { super(); if (db == null) throw new NullPointerException("db must be non-null"); this.db = db; relationships = new LinkedList(); try { ArchitectUtils.listenToHierarchy(this, db); } catch (ArchitectException ex) { logger.error("Couldn't listen to database", ex); } setLayout(new PlayPenLayout(this)); setName("Play Pen"); setMinimumSize(new Dimension(200,200)); setBackground(java.awt.Color.white); setOpaque(false); // XXX: it really is opaque, but we can't have super.paintComponent() painting over top of our relationship lines dt = new DropTarget(this, new PlayPenDropListener()); tableNames = new HashMap(); addContainerListener(this); setupTablePanePopup(); }
this.sendLetter((EmailLetter)letters.iterator().next(),topic);
Email eEmail =MailFinder.getInstance().lookupEmail(email); this.sendLetter((EmailLetter)letters.iterator().next(),topic,eEmail);
public void sendWelcomeLetters(MailTopic topic,String email)throws RemoteException,FinderException{ Collection letters = MailFinder.getInstance().getEmailLetters(((Integer)topic.getPrimaryKey()).intValue(),EmailLetter.TYPE_SUBSCRIPTION); if(letters!=null &&!letters.isEmpty()){ this.sendLetter((EmailLetter)letters.iterator().next(),topic); } }
if (logger.isDebugEnabled()) { mi = new JMenuItem(new AbstractAction("Show Mappings") { public void actionPerformed(ActionEvent e) { StringBuffer componentList = new StringBuffer(); for ( ColumnMapping columnMap : getModel().getMappings()) { componentList.append(columnMap).append("\n"); } JOptionPane.showMessageDialog(ArchitectFrame.getMainInstance(), new JScrollPane(new JTextArea(componentList.toString()))); } }); popup.add(mi); }
protected void createPopup() { ArchitectFrame af = ArchitectFrame.getMainInstance(); popup = new JPopupMenu(); JMenuItem mi; mi = new JMenuItem(af.editRelationshipAction); mi.setActionCommand(ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN); popup.add(mi); mi = new JMenuItem(af.deleteSelectedAction); mi.setActionCommand(ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN); popup.add(mi); }
StringTokenizer enum = new StringTokenizer(value, ",");
StringTokenizer items = new StringTokenizer(value, ",");
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken());
if (items.hasMoreTokens()) { red = parseNumber(items.nextToken());
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
if (items.hasMoreTokens()) { green = parseNumber(items.nextToken()); } if (items.hasMoreTokens()) { blue = parseNumber(items.nextToken()); } return new RGB(red, green, blue); }
protected RGB parseRGB(String value) { StringTokenizer enum = new StringTokenizer(value, ","); int red = 0; int green = 0; int blue = 0; if (enum.hasMoreTokens()) { red = parseNumber(enum.nextToken()); }
ind = new Individual(numLines);
ind = new Individual(numLines-1);
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers);
ind.addMarker((byte)allele1,(byte)allele2);
public void parseHapMap(File inFile) throws PedFileException, IOException { int colNum = -1; Vector lines = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } lines.add(line); } int numLines = lines.size(); if (numLines < 2){ throw new PedFileException("Hapmap data format error: empty file"); } Individual ind; this.allIndividuals = new Vector(); //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //first indiv ID will be a string beginning with "NA" if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } Vector namesIncludingDups = new Vector(); StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); namesIncludingDups.add(name); if (name.endsWith("dup")){ //skip dups (i.e. don't add 'em to ind array) continue; } String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. line " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ String dc = Chromosome.getDataChrom(); if (dc != null){ if (!dc.equalsIgnoreCase(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1) + ":\n The file appears to contain multiple chromosomes:" + "\n" + dc + ", " + s); } }else{ Chromosome.setDataChrom(s); } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; int indexIncludingDups = -1; while(tokenizer.hasMoreTokens()){ String alleles = tokenizer.nextToken(); indexIncludingDups++; //we've skipped the dups in the ind array, so we skip their genotypes if (((String)namesIncludingDups.elementAt(indexIncludingDups)).endsWith("dup")){ continue; } ind = (Individual)allIndividuals.elementAt(index); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } }
ind = new Individual(numTokens);
ind = new Individual(numMarkers);
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim());
ind.setFamilyID(new String(tokenizer.nextToken().trim())); ind.setIndividualID(new String(tokenizer.nextToken().trim())); ind.setDadID(new String(tokenizer.nextToken().trim())); ind.setMomID(new String(tokenizer.nextToken().trim()));
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) {
genotype1 = Byte.parseByte((tokenizer.nextToken().trim())); genotype2 = Byte.parseByte((tokenizer.nextToken().trim())); if(genotype1 <0 || genotype1 > 4 || genotype2 <0 || genotype2 >4) {
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
ind.addMarker(markers);
ind.addMarker(genotype1,genotype2);
public void parseLinkage(File inputFile) throws PedFileException, IOException { int colNum = -1; boolean withOptionalColumn = false; int numLines = 0; Individual ind; this.allIndividuals = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(inputFile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#")){ //skip comments continue; } StringTokenizer tokenizer = new StringTokenizer(line, "\n\t\" \""); int numTokens = tokenizer.countTokens(); //reading the first line if(colNum < 1){ //only check column number count for the first nonblank line colNum = numTokens; if(colNum%2==1) { withOptionalColumn = true; } } if(colNum != numTokens) { //this line has a different number of columns //should send some sort of error message throw new PedFileException("Column number mismatch in pedfile. line " + (numLines+1)); } ind = new Individual(numTokens); if(numTokens < 6) { throw new PedFileException("Incorrect number of fields on line " + (numLines+1)); } if(tokenizer.hasMoreTokens()){ ind.setFamilyID(tokenizer.nextToken().trim()); ind.setIndividualID(tokenizer.nextToken().trim()); ind.setDadID(tokenizer.nextToken().trim()); ind.setMomID(tokenizer.nextToken().trim()); try { ind.setGender(Integer.parseInt(tokenizer.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(tokenizer.nextToken().trim())); if(withOptionalColumn) { ind.setLiability(Integer.parseInt(tokenizer.nextToken().trim())); } }catch(NumberFormatException nfe) { throw new PedFileException("Pedfile error: invalid gender or affected status on line " + (numLines+1)); } while(tokenizer.hasMoreTokens()){ try { byte[] markers = new byte[2]; markers[0] = Byte.parseByte((tokenizer.nextToken().trim())); markers[1]= Byte.parseByte((tokenizer.nextToken().trim())); if(markers[0] <0 || markers[0] > 4 || markers[1] <0 || markers[1] >4) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) + ".\n all genotypes must be 0-4."); } ind.addMarker(markers); }catch(NumberFormatException nfe) { throw new PedFileException("Pedigree file input error: invalid genotype on line " + (numLines+1) ); } } //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } if (fam.getMembers().containsKey(ind.getIndividualID())){ throw new PedFileException("Individual "+ind.getIndividualID()+" in family "+ ind.getFamilyID()+" appears more than once."); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); this.allIndividuals.add(ind); } numLines++; } //now we check if anyone has a reference to a parent who isnt in the file, and if so, we remove the reference for(int i=0;i<allIndividuals.size();i++) { Individual currentInd = (Individual) allIndividuals.get(i); Hashtable curFam = ((Family)(families.get(currentInd.getFamilyID())) ).getMembers(); if( !currentInd.getDadID().equals("0") && ! (curFam.containsKey(currentInd.getDadID()))) { currentInd.setDadID("0"); bogusParents = true; } if(!currentInd.getMomID().equals("0") && ! (curFam.containsKey(currentInd.getMomID()))) { currentInd.setMomID("0"); bogusParents = true; } } if (numLines == 0){ throw new PedFileException("Data format error: empty file"); } }
tf = TransformerFactory.newInstance();
this.tf = (SAXTransformerFactory) TransformerFactory.newInstance();
public TransformTag() { super(); tf = TransformerFactory.newInstance(); }
log.info( "base: " + base + " href: " + href );
log.debug( "base: " + base + " href: " + href );
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
if (href == null)
if (null == href)
protected URIResolver createURIResolver() { return new URIResolver() { public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); } }; }
log.info( "base: " + base + " href: " + href );
log.debug( "base: " + base + " href: " + href );
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }
if (href == null)
if (null == href)
public Source resolve(String href, String base) throws TransformerException { if (log.isDebugEnabled() ) { log.info( "base: " + base + " href: " + href ); } // pass if we don't have a systemId if (href == null) return null; // @todo // #### this is a pretty simplistic implementation. // #### we should really handle this better such that if // #### base is specified as an absolute URL // #### we trim the end off it and append href return new StreamSource(context.getResourceAsStream(href)); }