Columns: rem (string, lengths 0 to 477k), add (string, lengths 0 to 313k), context (string, lengths 6 to 599k). Each row below gives the removed code (rem), its replacement (add), and the surrounding method (context).
byte[] thisMarker;
byte kid1, kid2;
public Vector linkageToChrom(File infile, int type)
        throws IllegalArgumentException, HaploViewException, PedFileException, IOException {
    pedFile = new PedFile();
    if (type == PED_FILE) {
        pedFile.parseLinkage(infile);
    } else {
        pedFile.parseHapMap(infile);
    }
    Vector result = pedFile.check();
    Vector indList = pedFile.getUnrelatedIndividuals();
    Vector indsInTrio = new Vector();
    int numMarkers = 0;
    numSingletons = 0;
    numTrios = 0;
    numPeds = pedFile.getNumFamilies();
    Individual currentInd;
    Family currentFamily;
    Vector chrom = new Vector();
    byte[] zeroArray = {0, 0};

    //first time through we deal with trios.
    for (int x = 0; x < indList.size(); x++) {
        currentInd = (Individual) indList.get(x);
        currentFamily = pedFile.getFamily(currentInd.getFamilyID());
        if (currentFamily.containsMember(currentInd.getMomID()) &&
                currentFamily.containsMember(currentInd.getDadID())) {
            //if indiv has both parents
            Individual mom = currentFamily.getMember(currentInd.getMomID());
            Individual dad = currentFamily.getMember(currentInd.getDadID());
            if (indList.contains(mom) && indList.contains(dad)) {
                numMarkers = currentInd.getNumMarkers();
                byte[] dadTb = new byte[numMarkers];
                byte[] dadUb = new byte[numMarkers];
                byte[] momTb = new byte[numMarkers];
                byte[] momUb = new byte[numMarkers];
                for (int i = 0; i < numMarkers; i++) {
                    byte[] thisMarker;
                    if (currentInd.getZeroed(i)) {
                        thisMarker = zeroArray;
                    } else {
                        thisMarker = currentInd.getMarker(i);
                    }
                    byte kid1 = thisMarker[0];
                    byte kid2 = thisMarker[1];
                    if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)) {
                        thisMarker = zeroArray;
                    } else {
                        thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i);
                    }
                    byte mom1 = thisMarker[0];
                    byte mom2 = thisMarker[1];
                    if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)) {
                        thisMarker = zeroArray;
                    } else {
                        thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i);
                    }
                    byte dad1 = thisMarker[0];
                    byte dad2 = thisMarker[1];

                    if (kid1 == 0 || kid2 == 0) {
                        //kid missing
                        if (dad1 == dad2) {
                            dadTb[i] = dad1;
                            dadUb[i] = dad1;
                        } else if (dad1 != 0 && dad2 != 0) {
                            dadTb[i] = (byte)(4+dad1);
                            dadUb[i] = (byte)(4+dad2);
                        }
                        if (mom1 == mom2) {
                            momTb[i] = mom1;
                            momUb[i] = mom1;
                        } else if (mom1 != 0 && mom2 != 0) {
                            momTb[i] = (byte)(4+mom1);
                            momUb[i] = (byte)(4+mom2);
                        }
                    } else if (kid1 == kid2) {
                        //kid homozygous
                        if (dad1 == 0) {
                            dadTb[i] = kid1;
                            dadUb[i] = 0;
                        } else if (dad1 == kid1) {
                            dadTb[i] = dad1;
                            dadUb[i] = dad2;
                        } else {
                            dadTb[i] = dad2;
                            dadUb[i] = dad1;
                        }
                        if (mom1 == 0) {
                            momTb[i] = kid1;
                            momUb[i] = 0;
                        } else if (mom1 == kid1) {
                            momTb[i] = mom1;
                            momUb[i] = mom2;
                        } else {
                            momTb[i] = mom2;
                            momUb[i] = mom1;
                        }
                    } else {
                        //kid heterozygous and this if tree's a bitch
                        if (dad1 == 0 && mom1 == 0) {
                            //both missing
                            dadTb[i] = 0;
                            dadUb[i] = 0;
                            momTb[i] = 0;
                            momUb[i] = 0;
                        } else if (dad1 == 0 && mom1 != mom2) {
                            //dad missing mom het
                            dadTb[i] = 0;
                            dadUb[i] = 0;
                            momTb[i] = (byte)(4+mom1);
                            momUb[i] = (byte)(4+mom2);
                        } else if (mom1 == 0 && dad1 != dad2) {
                            //dad het mom missing
                            dadTb[i] = (byte)(4+dad1);
                            dadUb[i] = (byte)(4+dad2);
                            momTb[i] = 0;
                            momUb[i] = 0;
                        } else if (dad1 == 0 && mom1 == mom2) {
                            //dad missing mom hom
                            momTb[i] = mom1;
                            momUb[i] = mom1;
                            dadUb[i] = 0;
                            if (kid1 == mom1) {
                                dadTb[i] = kid2;
                            } else {
                                dadTb[i] = kid1;
                            }
                        } else if (mom1 == 0 && dad1 == dad2) {
                            //mom missing dad hom
                            dadTb[i] = dad1;
                            dadUb[i] = dad1;
                            momUb[i] = 0;
                            if (kid1 == dad1) {
                                momTb[i] = kid2;
                            } else {
                                momTb[i] = kid1;
                            }
                        } else if (dad1 == dad2 && mom1 != mom2) {
                            //dad hom mom het
                            dadTb[i] = dad1;
                            dadUb[i] = dad2;
                            if (kid1 == dad1) {
                                momTb[i] = kid2;
                                momUb[i] = kid1;
                            } else {
                                momTb[i] = kid1;
                                momUb[i] = kid2;
                            }
                        } else if (mom1 == mom2 && dad1 != dad2) {
                            //dad het mom hom
                            momTb[i] = mom1;
                            momUb[i] = mom2;
                            if (kid1 == mom1) {
                                dadTb[i] = kid2;
                                dadUb[i] = kid1;
                            } else {
                                dadTb[i] = kid1;
                                dadUb[i] = kid2;
                            }
                        } else if (dad1 == dad2 && mom1 == mom2) {
                            //mom & dad hom
                            dadTb[i] = dad1;
                            dadUb[i] = dad1;
                            momTb[i] = mom1;
                            momUb[i] = mom1;
                        } else {
                            //everybody het
                            dadTb[i] = (byte)(4+dad1);
                            dadUb[i] = (byte)(4+dad2);
                            momTb[i] = (byte)(4+mom1);
                            momUb[i] = (byte)(4+mom2);
                        }
                    }
                }
                chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), dadTb,
                        dad.getAffectedStatus(), currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), dadUb,
                        dad.getAffectedStatus(), currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), momTb,
                        mom.getAffectedStatus(), currentInd.getAffectedStatus()));
                chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), momUb,
                        mom.getAffectedStatus(), currentInd.getAffectedStatus()));
                numTrios++;
                indsInTrio.add(mom);
                indsInTrio.add(dad);
                indsInTrio.add(currentInd);
            }
        }
    }

    for (int x = 0; x < indList.size(); x++) {
        currentInd = (Individual) indList.get(x);
        if (!indsInTrio.contains(currentInd)) {
            //ind has no parents or kids -- he's a singleton
            numMarkers = currentInd.getNumMarkers();
            byte[] chrom1 = new byte[numMarkers];
            byte[] chrom2 = new byte[numMarkers];
            for (int i = 0; i < numMarkers; i++) {
                byte[] thisMarker;
                if (currentInd.getZeroed(i)) {
                    thisMarker = zeroArray;
                } else {
                    thisMarker = currentInd.getMarker(i);
                }
                if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0) {
                    chrom1[i] = thisMarker[0];
                    chrom2[i] = thisMarker[1];
                } else {
                    chrom1[i] = (byte)(4+thisMarker[0]);
                    chrom2[i] = (byte)(4+thisMarker[1]);
                }
            }
            chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), chrom1,
                    currentInd.getAffectedStatus(), -1));
            chrom.add(new Chromosome(currentInd.getFamilyID(), currentInd.getIndividualID(), chrom2,
                    currentInd.getAffectedStatus(), -1));
            numSingletons++;
        }
    }
    chromosomes = chrom;
    //wipe clean any existing marker info so we know we're starting with a new file
    Chromosome.markers = null;
    return result;
}
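Taken together, the rem/add pairs in this section trace one refactor: Individual stops handing each genotype back as a two-element byte[] from getMarker(i) and instead exposes the two alleles separately. The sketch below shows one way the accessor side of that change could look. Only the method names and signatures are taken from calls that appear in this section (getMarkerA, getMarkerB, getZeroed, getZeroedArray, setZeroedArray, getNumMarkers, setMarkers(byte[], byte[])); the field names, storage layout, and constructor are assumptions made for illustration, not the project's actual class.

// Hypothetical sketch of the Individual accessors implied by the add column.
public class Individual {
    private byte[] markersA;     // first allele of each marker (assumed storage)
    private byte[] markersB;     // second allele of each marker (assumed storage)
    private boolean[] zeroed;    // true where a genotype has been zeroed out

    public Individual(byte[] markersA, byte[] markersB, boolean[] zeroed) {
        this.markersA = markersA;
        this.markersB = markersB;
        this.zeroed = zeroed;
    }

    // old style: both alleles packed into a temporary byte[2]
    public byte[] getMarker(int index) {
        return new byte[]{ markersA[index], markersB[index] };
    }

    // new style: each allele read directly, no temporary array
    public byte getMarkerA(int index) { return markersA[index]; }
    public byte getMarkerB(int index) { return markersB[index]; }

    public boolean getZeroed(int index) { return zeroed[index]; }
    public boolean[] getZeroedArray() { return zeroed; }
    public void setZeroedArray(boolean[] zeroed) { this.zeroed = zeroed; }
    public int getNumMarkers() { return markersA.length; }

    // two-array setter matching the final rem/add pair of this section
    public void setMarkers(byte[] markersA, byte[] markersB) {
        this.markersA = markersA;
        this.markersB = markersB;
    }
}

On the caller side this is what removes the zeroArray placeholder and the thisMarker temporary from linkageToChrom: the add lines read kid1 = currentInd.getMarkerA(i) and kid2 = currentInd.getMarkerB(i) directly, substituting 0 when getZeroed(i) is true.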
thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1];
kid1 = 0; kid2 = 0; }else{ kid1 = currentInd.getMarkerA(i); kid2 = currentInd.getMarkerB(i); } byte mom1,mom2;
context: same linkageToChrom(File, int) method as shown in full above.
thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getMomID())).getMarker(i); } byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1];
mom1 = 0; mom2 = 0; }else{ mom1 = (currentFamily.getMember(currentInd.getMomID())).getMarkerA(i); mom2 = (currentFamily.getMember(currentInd.getMomID())).getMarkerB(i); } byte dad1,dad2;
context: same linkageToChrom(File, int) method as shown in full above.
thisMarker = zeroArray; }else{ thisMarker = (currentFamily.getMember(currentInd.getDadID())).getMarker(i); } byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1];
dad1 = 0; dad2 = 0; }else{ dad1 = (currentFamily.getMember(currentInd.getDadID())).getMarkerA(i); dad2 = (currentFamily.getMember(currentInd.getDadID())).getMarkerB(i); }
context: same linkageToChrom(File, int) method as shown in full above.
byte[] thisMarker;
byte thisMarkerA, thisMarkerB;
context: same linkageToChrom(File, int) method as shown in full above.
thisMarker = zeroArray; }else{ thisMarker = currentInd.getMarker(i); } if (thisMarker[0] == thisMarker[1] || thisMarker[0] == 0 || thisMarker[1] == 0){ chrom1[i] = thisMarker[0]; chrom2[i] = thisMarker[1]; }else{ chrom1[i] = (byte)(4+thisMarker[0]); chrom2[i] = (byte)(4+thisMarker[1]);
thisMarkerA = 0; thisMarkerB = 0; }else{ thisMarkerA = currentInd.getMarkerA(i); thisMarkerB = currentInd.getMarkerB(i); } if (thisMarkerA == thisMarkerB || thisMarkerA == 0 || thisMarkerB == 0){ chrom1[i] = thisMarkerA; chrom2[i] = thisMarkerB; }else{ chrom1[i] = (byte)(4+thisMarkerA); chrom2[i] = (byte)(4+thisMarkerB);
context: same linkageToChrom(File, int) method as shown in full above.
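Both methods in this section rely on the same byte convention for a chromosome slot: 0 means missing, 1 to 4 is an unambiguously assigned allele, and 4+allele (5 to 8) flags an allele whose transmission could not be resolved; that is the meaning of the (byte)(4+...) writes in linkageToChrom and the "thisAllele >= 5 ... thisAllele-4" reads in prepareMarkerInput. The helper below only restates that convention for reference; the class and method names are invented for illustration and are not part of the original code.

// Illustrative helper for the genotype byte encoding used above (not in the original source).
final class GenotypeCode {
    static final byte MISSING = 0;

    // true when the byte carries the "ambiguous" offset of 4
    static boolean isAmbiguous(byte code) {
        return code >= 5 && code <= 8;
    }

    // recover the underlying allele (1..4), or 0 for missing
    static byte allele(byte code) {
        if (code == MISSING) return 0;
        return isAmbiguous(code) ? (byte)(code - 4) : code;
    }

    // mark an allele as ambiguous the same way linkageToChrom does: 4 + allele
    static byte ambiguous(byte allele) {
        return (byte)(4 + allele);
    }
}

When prepareMarkerInput tallies major and minor alleles, such a code contributes half an observation to each count (numa1 += 0.5; numa2 += 0.5).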
Vector unsortedMarkers = ind.getMarkers();
byte[] sortedMarkersa = new byte[ind.getNumMarkers()]; byte[] sortedMarkersb = new byte[ind.getNumMarkers()];
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException {
    //this method is called to gather data about the markers used.
    //It is assumed that the input file is two columns, the first being
    //the name and the second the absolute position. the maxdist is
    //used to determine beyond what distance comparisons will not be
    //made. if the infile param is null, loads up "dummy info" for
    //situation where no info file exists
    //An optional third column is supported which is designed to hold
    //association study data. If there is a third column there will be
    //a visual indicator in the D' display that there is additional data
    //and the detailed data can be viewed with a mouse press.
    Vector names = new Vector();
    HashSet nameSearch = new HashSet();
    HashSet dupCheck = new HashSet();
    Vector positions = new Vector();
    Vector extras = new Vector();
    dupsToBeFlagged = false;
    dupNames = false;
    try {
        if (infile != null) {
            if (infile.length() < 1) {
                throw new HaploViewException("Info file is empty or does not exist: " + infile.getName());
            }
            String currentLine;
            long prevloc = -1000000000;
            //read the input file:
            BufferedReader in = new BufferedReader(new FileReader(infile));
            int lineCount = 0;
            while ((currentLine = in.readLine()) != null) {
                StringTokenizer st = new StringTokenizer(currentLine);
                if (st.countTokens() > 1) {
                    lineCount++;
                } else if (st.countTokens() == 1) {
                    //complain if only one field found
                    throw new HaploViewException("Info file format error on line " + lineCount +
                            ":\n Info file must be of format: <markername> <markerposition>");
                } else {
                    //skip blank lines
                    continue;
                }
                String name = st.nextToken();
                String l = st.nextToken();
                String extra = null;
                if (st.hasMoreTokens()) extra = st.nextToken();
                long loc;
                try {
                    loc = Long.parseLong(l);
                } catch (NumberFormatException nfe) {
                    throw new HaploViewException("Info file format error on line " + lineCount +
                            ":\n\"" + l + "\" should be of type long." +
                            "\n Info file must be of format: <markername> <markerposition>");
                }
                //basically if anyone is crazy enough to load a dataset, then go back and load
                //an out-of-order info file we tell them to bugger off and start over.
                if (loc < prevloc && Chromosome.markers != null) {
                    throw new HaploViewException("Info file out of order with preloaded dataset:\n" +
                            name + "\nPlease reload data file and info file together.");
                }
                prevloc = loc;
                if (nameSearch.contains(name)) {
                    dupCheck.add(name);
                }
                names.add(name);
                nameSearch.add(name);
                positions.add(l);
                extras.add(extra);
            }
            if (lineCount > Chromosome.getUnfilteredSize()) {
                throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file."));
            }
            if (lineCount < Chromosome.getUnfilteredSize()) {
                throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file."));
            }
            infoKnown = true;
        }

        if (hapmapGoodies != null) {
            //we know some stuff from the hapmap so we'll add it here
            for (int x = 0; x < hapmapGoodies.length; x++) {
                if (nameSearch.contains(hapmapGoodies[x][0])) {
                    dupCheck.add(hapmapGoodies[x][0]);
                }
                names.add(hapmapGoodies[x][0]);
                nameSearch.add(hapmapGoodies[x][0]);
                positions.add(hapmapGoodies[x][1]);
                extras.add(null);
            }
            infoKnown = true;
        }

        if (dupCheck.size() > 0) {
            int nameCount = names.size();
            Hashtable dupCounts = new Hashtable();
            for (int i = 0; i < nameCount; i++) {
                if (dupCheck.contains(names.get(i))) {
                    String n = (String) names.get(i);
                    if (dupCounts.containsKey(n)) {
                        int numDups = ((Integer) dupCounts.get(n)).intValue();
                        String newName = n + "." + numDups;
                        while (nameSearch.contains(newName)) {
                            numDups++;
                            newName = n + "." + numDups;
                        }
                        names.setElementAt(newName, i);
                        nameSearch.add(newName);
                        dupCounts.put(n, new Integer(numDups));
                    } else {
                        //we leave the first instance with its original name
                        dupCounts.put(n, new Integer(1));
                    }
                    dupNames = true;
                }
            }
        }

        //sort the markers
        int numLines = names.size();
        class SortingHelper implements Comparable {
            long pos;
            int orderInFile;

            public SortingHelper(long pos, int order) {
                this.pos = pos;
                this.orderInFile = order;
            }

            public int compareTo(Object o) {
                SortingHelper sh = (SortingHelper) o;
                if (sh.pos > pos) {
                    return -1;
                } else if (sh.pos < pos) {
                    return 1;
                } else {
                    return 0;
                }
            }
        }
        boolean needSort = false;
        Vector sortHelpers = new Vector();
        for (int k = 0; k < numLines; k++) {
            sortHelpers.add(new SortingHelper(Long.parseLong((String) positions.get(k)), k));
        }
        //loop through and check if any markers are out of order
        for (int k = 1; k < numLines; k++) {
            if (((SortingHelper) sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) {
                needSort = true;
                break;
            }
        }
        //if any were out of order, then we need to put them in order
        if (needSort) {
            //sort the positions
            Collections.sort(sortHelpers);
            Vector newNames = new Vector();
            Vector newExtras = new Vector();
            Vector newPositions = new Vector();
            int[] realPos = new int[numLines];
            //reorder the vectors names and extras so that they have the same order as the sorted markers
            for (int i = 0; i < sortHelpers.size(); i++) {
                realPos[i] = ((SortingHelper) sortHelpers.get(i)).orderInFile;
                newNames.add(names.get(realPos[i]));
                newPositions.add(positions.get(realPos[i]));
                newExtras.add(extras.get(realPos[i]));
            }
            names = newNames;
            extras = newExtras;
            positions = newPositions;
            byte[] tempGenotype = new byte[sortHelpers.size()];
            //now we reorder all the individuals genotypes according to the sorted marker order
            for (int j = 0; j < chromosomes.size(); j++) {
                Chromosome tempChrom = (Chromosome) chromosomes.elementAt(j);
                for (int i = 0; i < sortHelpers.size(); i++) {
                    tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]);
                }
                for (int i = 0; i < sortHelpers.size(); i++) {
                    tempChrom.setGenotype(tempGenotype[i], i);
                }
            }
            //sort pedfile objects
            //todo: this should really be done before pedfile is subjected to any processing.
            //todo: that would require altering some order of operations in dealing with inputs
            //todo: this will fry an out-of-order haps file...grr
            Vector unsortedRes = pedFile.getResults();
            Vector sortedRes = new Vector();
            for (int i = 0; i < realPos.length; i++) {
                sortedRes.add(unsortedRes.elementAt(realPos[i]));
            }
            pedFile.setResults(sortedRes);
            Vector o = pedFile.getAllIndividuals();
            for (int i = 0; i < o.size(); i++) {
                Individual ind = (Individual) o.get(i);
                Vector unsortedMarkers = ind.getMarkers();
                boolean[] unsortedZeroed = ind.getZeroedArray();
                boolean[] sortedZeroed = new boolean[unsortedMarkers.size()];
                Vector sortedMarkers = new Vector();
                for (int j = 0; j < unsortedMarkers.size(); j++) {
                    sortedMarkers.add(unsortedMarkers.get(realPos[j]));
                    sortedZeroed[j] = unsortedZeroed[realPos[j]];
                }
                ind.setMarkers(new Vector(sortedMarkers));
                ind.setZeroedArray(sortedZeroed);
            }
        }
    } catch (HaploViewException e) {
        throw(e);
    } finally {
        double numChroms = chromosomes.size();
        Vector markerInfo = new Vector();
        double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()];
        percentBadGenotypes = new double[Chromosome.getUnfilteredSize()];
        Vector results = null;
        if (pedFile != null) {
            results = pedFile.getResults();
        }
        long prevPosition = Long.MIN_VALUE;
        SNP prevMarker = null;
        MarkerResult pmr = null;
        for (int i = 0; i < Chromosome.getUnfilteredSize(); i++) {
            MarkerResult mr = null;
            if (results != null) {
                mr = (MarkerResult) results.elementAt(i);
            }
            //to compute minor/major alleles, browse chrom list and count instances of each allele
            byte a1 = 0;
            byte a2 = 0;
            double numa1 = 0;
            double numa2 = 0;
            for (int j = 0; j < chromosomes.size(); j++) {
                //if there is a data point for this marker on this chromosome
                byte thisAllele = ((Chromosome) chromosomes.elementAt(j)).getUnfilteredGenotype(i);
                if (!(thisAllele == 0)) {
                    if (thisAllele >= 5) {
                        numa1 += 0.5;
                        numa2 += 0.5;
                        if (thisAllele < 9) {
                            if (a1 == 0) {
                                a1 = (byte)(thisAllele-4);
                            } else if (a2 == 0) {
                                if (!(thisAllele-4 == a1)) {
                                    a2 = (byte)(thisAllele-4);
                                }
                            }
                        }
                    } else if (a1 == 0) {
                        a1 = thisAllele;
                        numa1++;
                    } else if (thisAllele == a1) {
                        numa1++;
                    } else {
                        numa2++;
                        a2 = thisAllele;
                    }
                } else {
                    numBadGenotypes[i]++;
                }
            }
            if (numa2 > numa1) {
                byte temp = a1;
                double tempnum = numa1;
                numa1 = numa2;
                a1 = a2;
                numa2 = tempnum;
                a2 = temp;
            }
            double maf;
            if (mr != null) {
                maf = Util.roundDouble(mr.getMAF(), 3);
            } else {
                maf = Util.roundDouble((numa2/(numa1+numa2)), 3);
            }
            if (infoKnown) {
                long pos = Long.parseLong((String) positions.elementAt(i));
                SNP thisMarker = (new SNP((String) names.elementAt(i), pos, maf, a1, a2, (String) extras.elementAt(i)));
                markerInfo.add(thisMarker);
                if (mr != null) {
                    double genoPC = mr.getGenoPercent();
                    //check to make sure adjacent SNPs do not have identical positions
                    if (prevPosition != Long.MIN_VALUE) {
                        //only do this for markers 2..N, since we're comparing to the previous location
                        if (pos == prevPosition) {
                            dupsToBeFlagged = true;
                            if (genoPC >= pmr.getGenoPercent()) {
                                //use this one because it has more genotypes
                                thisMarker.setDup(1);
                                prevMarker.setDup(2);
                            } else {
                                //use the other one because it has more genotypes
                                thisMarker.setDup(2);
                                prevMarker.setDup(1);
                            }
                        }
                    }
                    prevPosition = pos;
                    prevMarker = thisMarker;
                    pmr = mr;
                }
            } else {
                markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf, a1, a2));
            }
            percentBadGenotypes[i] = numBadGenotypes[i]/numChroms;
        }
        Chromosome.markers = markerInfo;
    }
}
boolean[] sortedZeroed = new boolean[unsortedMarkers.size()]; Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.get(realPos[j]));
boolean[] sortedZeroed = new boolean[unsortedZeroed.length]; for (int j = 0; j < ind.getNumMarkers(); j++){ sortedMarkersa[j] = ind.getMarkerA(realPos[j]); sortedMarkersb[j] = ind.getMarkerB(realPos[j]);
context: same prepareMarkerInput(File, String[][]) method as shown in full above.
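This row replaces the Vector-based reorder inside prepareMarkerInput with two parallel byte arrays filled through the new accessors. The sketch below isolates that pattern using the hypothetical Individual class sketched earlier; realPos comes from the sorting pass in prepareMarkerInput and maps each sorted position back to its original file order, and setMarkers(byte[], byte[]) mirrors the final rem/add pair of this section.

// Illustrative reorder of one individual's genotypes into sorted marker order,
// following the pattern of the add column (sortedMarkersa/sortedMarkersb).
static void reorderIndividual(Individual ind, int[] realPos) {
    byte[] sortedA = new byte[ind.getNumMarkers()];
    byte[] sortedB = new byte[ind.getNumMarkers()];
    boolean[] unsortedZeroed = ind.getZeroedArray();
    boolean[] sortedZeroed = new boolean[unsortedZeroed.length];
    for (int j = 0; j < ind.getNumMarkers(); j++) {
        sortedA[j] = ind.getMarkerA(realPos[j]);      // allele A pulled from its original slot
        sortedB[j] = ind.getMarkerB(realPos[j]);      // allele B pulled from its original slot
        sortedZeroed[j] = unsortedZeroed[realPos[j]]; // keep the zeroed flags aligned
    }
    ind.setMarkers(sortedA, sortedB);
    ind.setZeroedArray(sortedZeroed);
}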
ind.setMarkers(new Vector(sortedMarkers));
ind.setMarkers(sortedMarkersa, sortedMarkersb);
void prepareMarkerInput(File infile, String[][] hapmapGoodies) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists //An optional third column is supported which is designed to hold //association study data. If there is a third column there will be //a visual indicator in the D' display that there is additional data //and the detailed data can be viewed with a mouse press. Vector names = new Vector(); HashSet nameSearch = new HashSet(); HashSet dupCheck = new HashSet(); Vector positions = new Vector(); Vector extras = new Vector(); dupsToBeFlagged = false; dupNames = false; try{ if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; long prevloc = -1000000000; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } String name = st.nextToken(); String l = st.nextToken(); String extra = null; if (st.hasMoreTokens()) extra = st.nextToken(); long loc; try{ loc = Long.parseLong(l); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + l + "\" should be of type long." + "\n Info file must be of format: <markername> <markerposition>"); } //basically if anyone is crazy enough to load a dataset, then go back and load //an out-of-order info file we tell them to bugger off and start over. if (loc < prevloc && Chromosome.markers != null){ throw new HaploViewException("Info file out of order with preloaded dataset:\n"+ name + "\nPlease reload data file and info file together."); } prevloc = loc; if (nameSearch.contains(name)){ dupCheck.add(name); } names.add(name); nameSearch.add(name); positions.add(l); extras.add(extra); } if (lineCount > Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too many\nmarkers in info file compared to data file.")); } if (lineCount < Chromosome.getUnfilteredSize()){ throw(new HaploViewException("Info file error:\nMarker number mismatch: too few\nmarkers in info file compared to data file.")); } infoKnown=true; } if (hapmapGoodies != null){ //we know some stuff from the hapmap so we'll add it here for (int x=0; x < hapmapGoodies.length; x++){ if (nameSearch.contains(hapmapGoodies[x][0])){ dupCheck.add(hapmapGoodies[x][0]); } names.add(hapmapGoodies[x][0]); nameSearch.add(hapmapGoodies[x][0]); positions.add(hapmapGoodies[x][1]); extras.add(null); } infoKnown = true; } if(dupCheck.size() > 0) { int nameCount = names.size(); Hashtable dupCounts = new Hashtable(); for(int i=0;i<nameCount;i++) { if(dupCheck.contains(names.get(i))){ String n = (String) names.get(i); if(dupCounts.containsKey(n)){ int numDups = ((Integer) dupCounts.get(n)).intValue(); String newName = n + "." 
+ numDups; while (nameSearch.contains(newName)){ numDups++; newName = n + "." + numDups; } names.setElementAt(newName,i); nameSearch.add(newName); dupCounts.put(n,new Integer(numDups)) ; }else { //we leave the first instance with its original name dupCounts.put(n,new Integer(1)); } dupNames = true; } } } //sort the markers int numLines = names.size(); class SortingHelper implements Comparable{ long pos; int orderInFile; public SortingHelper(long pos, int order){ this.pos = pos; this.orderInFile = order; } public int compareTo(Object o) { SortingHelper sh = (SortingHelper)o; if (sh.pos > pos){ return -1; }else if (sh.pos < pos){ return 1; }else{ return 0; } } } boolean needSort = false; Vector sortHelpers = new Vector(); for (int k = 0; k < (numLines); k++){ sortHelpers.add(new SortingHelper(Long.parseLong((String)positions.get(k)),k)); } //loop through and check if any markers are out of order for (int k = 1; k < (numLines); k++){ if(((SortingHelper)sortHelpers.get(k)).compareTo(sortHelpers.get(k-1)) < 0) { needSort = true; break; } } //if any were out of order, then we need to put them in order if(needSort){ //sort the positions Collections.sort(sortHelpers); Vector newNames = new Vector(); Vector newExtras = new Vector(); Vector newPositions = new Vector(); int[] realPos = new int[numLines]; //reorder the vectors names and extras so that they have the same order as the sorted markers for (int i = 0; i < sortHelpers.size(); i++){ realPos[i] = ((SortingHelper)sortHelpers.get(i)).orderInFile; newNames.add(names.get(realPos[i])); newPositions.add(positions.get(realPos[i])); newExtras.add(extras.get(realPos[i])); } names = newNames; extras = newExtras; positions = newPositions; byte[] tempGenotype = new byte[sortHelpers.size()]; //now we reorder all the individuals genotypes according to the sorted marker order for(int j=0;j<chromosomes.size();j++){ Chromosome tempChrom = (Chromosome)chromosomes.elementAt(j); for(int i =0;i<sortHelpers.size();i++){ tempGenotype[i] = tempChrom.getUnfilteredGenotype(realPos[i]); } for(int i=0;i<sortHelpers.size();i++){ tempChrom.setGenotype(tempGenotype[i],i); } } //sort pedfile objects //todo: this should really be done before pedfile is subjected to any processing. 
//todo: that would require altering some order of operations in dealing with inputs //todo: this will fry an out-of-order haps file...grr Vector unsortedRes = pedFile.getResults(); Vector sortedRes = new Vector(); for (int i = 0; i < realPos.length; i++){ sortedRes.add(unsortedRes.elementAt(realPos[i])); } pedFile.setResults(sortedRes); Vector o = pedFile.getAllIndividuals(); for (int i = 0; i < o.size(); i++){ Individual ind = (Individual) o.get(i); Vector unsortedMarkers = ind.getMarkers(); boolean[] unsortedZeroed = ind.getZeroedArray(); boolean[] sortedZeroed = new boolean[unsortedMarkers.size()]; Vector sortedMarkers = new Vector(); for (int j = 0; j < unsortedMarkers.size(); j++){ sortedMarkers.add(unsortedMarkers.get(realPos[j])); sortedZeroed[j] = unsortedZeroed[realPos[j]]; } ind.setMarkers(new Vector(sortedMarkers)); ind.setZeroedArray(sortedZeroed); } } }catch (HaploViewException e){ throw(e); }finally{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); double[] numBadGenotypes = new double[Chromosome.getUnfilteredSize()]; percentBadGenotypes = new double[Chromosome.getUnfilteredSize()]; Vector results = null; if (pedFile != null){ results = pedFile.getResults(); } long prevPosition = Long.MIN_VALUE; SNP prevMarker = null; MarkerResult pmr = null; for (int i = 0; i < Chromosome.getUnfilteredSize(); i++){ MarkerResult mr = null; if (results != null){ mr = (MarkerResult)results.elementAt(i); } //to compute minor/major alleles, browse chrom list and count instances of each allele byte a1 = 0; byte a2 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getUnfilteredGenotype(i); if (!(thisAllele == 0)){ if (thisAllele >= 5){ numa1+=0.5; numa2+=0.5; if (thisAllele < 9){ if (a1==0){ a1 = (byte)(thisAllele-4); }else if (a2 == 0){ if (!(thisAllele-4 == a1)){ a2 = (byte)(thisAllele-4); } } } }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; a2 = thisAllele; } } else { numBadGenotypes[i]++; } } if (numa2 > numa1){ byte temp = a1; double tempnum = numa1; numa1 = numa2; a1 = a2; numa2 = tempnum; a2 = temp; } double maf; if (mr != null){ maf = Util.roundDouble(mr.getMAF(),3); }else{ maf = Util.roundDouble((numa2/(numa1+numa2)),3); } if (infoKnown){ long pos = Long.parseLong((String)positions.elementAt(i)); SNP thisMarker = (new SNP((String)names.elementAt(i), pos, maf, a1, a2, (String)extras.elementAt(i))); markerInfo.add(thisMarker); if (mr != null){ double genoPC = mr.getGenoPercent(); //check to make sure adjacent SNPs do not have identical positions if (prevPosition != Long.MIN_VALUE){ //only do this for markers 2..N, since we're comparing to the previous location if (pos == prevPosition){ dupsToBeFlagged = true; if (genoPC >= pmr.getGenoPercent()){ //use this one because it has more genotypes thisMarker.setDup(1); prevMarker.setDup(2); }else{ //use the other one because it has more genotypes thisMarker.setDup(2); prevMarker.setDup(1); } } } prevPosition = pos; prevMarker = thisMarker; pmr = mr; } }else{ markerInfo.add(new SNP("Marker " + String.valueOf(i+1), (i*4000), maf,a1,a2)); } percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo; } }
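A minimal, self-contained sketch of the reordering step in prepareMarkerInput above, using hypothetical marker names and positions; it mirrors what SortingHelper and the realPos[] index do (sort markers by chromosomal position while remembering each marker's original place in the info file):

import java.util.Arrays;
import java.util.Comparator;

public class InfoSortSketch {
    public static void main(String[] args) {
        // Hypothetical info file content: <markername> <markerposition> [extra]
        String[] names = {"rs3", "rs1", "rs2"};
        long[] positions = {30500L, 10200L, 20100L};
        // order[i] plays the role of realPos[i]: the original file index of the
        // i-th marker once everything is sorted by position.
        Integer[] order = {0, 1, 2};
        Arrays.sort(order, Comparator.comparingLong(i -> positions[i]));
        for (int idx : order) {
            System.out.println(names[idx] + "\t" + positions[idx]);
        }
        // prints rs1 10200, rs2 20100, rs3 30500
    }
}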
Map params = config.getParamValues(); final String jndiFactory = (String)params.get(JSR160ApplicationConfig.JNDI_FACTORY); final String jndiURL = (String)params.get(JSR160ApplicationConfig.JNDI_URL); if(jndiFactory != null) env.put(JSR160ApplicationConfig.JNDI_FACTORY, jndiFactory); if(jndiURL != null) env.put(JSR160ApplicationConfig.JNDI_URL, jndiURL);
public ServerConnection getServerConnection(ApplicationConfig config) throws ConnectionFailedException { try { /* Create an RMI connector client */ HashMap env = new HashMap(); String[] credentials = new String[] {config.getUsername(), config.getPassword()}; env.put("jmx.remote.credentials", credentials); JMXServiceURL url = new JMXServiceURL(config.getURL()); JMXConnector jmxc = JMXConnectorFactory.connect(url, env); return new JSR160ServerConnection(jmxc, jmxc.getMBeanServerConnection()); } catch (Throwable e) { throw new ConnectionFailedException(e); } }
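For context, a self-contained sketch of the JSR-160 connection sequence used in getServerConnection above; the service URL and credentials are hypothetical, and no JNDI environment entries are set:

import java.util.HashMap;
import javax.management.MBeanServerConnection;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

public class Jsr160ConnectSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical RMI connector URL; in the code above it comes from config.getURL()
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:9999/jmxrmi");
        HashMap env = new HashMap();
        env.put("jmx.remote.credentials", new String[] {"monitorUser", "monitorPassword"});
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        MBeanServerConnection mbsc = jmxc.getMBeanServerConnection();
        System.out.println("Connected; MBean count: " + mbsc.getMBeanCount());
        jmxc.close();
    }
}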
org.apache.log4j.Logger folderLog = org.apache.log4j.Logger.getLogger( PhotoFolder.class.getName() ); folderLog.setLevel( org.apache.log4j.Level.DEBUG );
public static void main( String[] args ) { org.apache.log4j.BasicConfigurator.configure(); log.setLevel( org.apache.log4j.Level.DEBUG ); junit.textui.TestRunner.run( suite() ); }
PhotoFolder subfolder = PhotoFolder.create( "New subfolder", folder ); assertTrue( "Not notified of subfolder structure change", l1.structureModified ); assertEquals( "subfolder info not correct", folder, l1.structChangeFolder ); l1.structureModified = false; l1.changedFolder = null; subfolder.setDescription( "Changed subfolder" ); assertTrue( "l1 not called for subfolder modification", l1.subfolderModified ); assertEquals( "subfolder info not correct", subfolder, l1.changedFolder ); l1.subfolderModified = false; l1.changedFolder = null; subfolder.delete(); assertTrue( "Not notified of subfolder structure change", l1.structureModified ); assertEquals( "subfolder info not correct", folder, l1.structChangeFolder );
public void testListener() { PhotoFolder folder = PhotoFolder.create( "testListener", null ); TestListener l1 = new TestListener(); TestListener l2 = new TestListener(); folder.addPhotoCollectionChangeListener( l1 ); folder.addPhotoCollectionChangeListener( l2 ); folder.setName( "testLiistener" ); assertTrue( "l1 not called", l1.modified ); assertTrue( "l2 not called", l2.modified ); l1.modified = false; l2.modified = false; folder.removePhotoCollectionChangeListener( l2 ); folder.setDescription( "Folder used to test listener support" ); assertTrue( "l1 not called", l1.modified ); assertFalse( "l2 should not have been called", l2.modified ); // TODO: test other fields }
plinkFilters = null;
void readWGA(String[] inputOptions) { String wgaFile = inputOptions[0]; String mapFile = inputOptions[1]; String secondaryFile = inputOptions[2]; String embeddedMap = inputOptions[3]; boolean embed = false; this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); Plink plink = new Plink(this); try{ if (embeddedMap != null){ embed = true; } if (wgaFile != null){ plink.parseWGA(wgaFile,mapFile,embed); } if (secondaryFile != null){ plink.parseMoreResults(secondaryFile); } plinkPanel = new PlinkResultsPanel(this,plink.getResults(),plink.getColumnNames(), plinkFilters); HaploviewTab plinkTab = new HaploviewTab(plinkPanel); plinkTab.add(plinkPanel); tabs = new HaploviewTabbedPane(); tabs.addTab(VIEW_PLINK, plinkTab); readMarkerItem.setEnabled(false); analysisItem.setEnabled(false); blocksItem.setEnabled(false); gbrowseItem.setEnabled(false); for (int i = 0; i < exportMenuItems.length; i++) { exportMenuItems[i].setEnabled(false); } displayMenu.setEnabled(false); analysisMenu.setEnabled(false); keyMenu.setEnabled(false); tabs.setSelectedComponent(plinkTab); Container contents = getContentPane(); contents.removeAll(); contents.repaint(); contents.add(tabs); repaint(); setVisible(true); }catch(PlinkException wge){ JOptionPane.showMessageDialog(this, wge.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }
final JellyContext newContext = new JellyContext(context);
final JellyContext newContext = context.newJellyContext();
public void doTag(final XMLOutput output) throws Exception { if ( xmlOutput == null ) { // lets default to system.out xmlOutput = XMLOutput.createXMLOutput( System.out ); } // lets create a child context final JellyContext newContext = new JellyContext(context); Thread thread = new Thread( new Runnable() { public void run() { try { getBody().run(newContext, xmlOutput); xmlOutput.close(); } catch (Exception e) { e.printStackTrace(); } } } ); if ( name != null ) { thread.setName( name ); } thread.start(); }
xmlOutput.close();
if (closeOutput) { xmlOutput.close(); } else { xmlOutput.flush(); }
public void doTag(final XMLOutput output) throws Exception { if ( xmlOutput == null ) { // lets default to system.out xmlOutput = XMLOutput.createXMLOutput( System.out ); } // lets create a child context final JellyContext newContext = new JellyContext(context); Thread thread = new Thread( new Runnable() { public void run() { try { getBody().run(newContext, xmlOutput); xmlOutput.close(); } catch (Exception e) { e.printStackTrace(); } } } ); if ( name != null ) { thread.setName( name ); } thread.start(); }
xmlOutput.close();
if (closeOutput) { xmlOutput.close(); } else { xmlOutput.flush(); }
public void run() { try { getBody().run(newContext, xmlOutput); xmlOutput.close(); } catch (Exception e) { e.printStackTrace(); } }
this.closeOutput = true;
public void setFile(String name) throws IOException { setXmlOutput( XMLOutput.createXMLOutput(new FileOutputStream(name)) ); }
this.closeOutput = false;
public void setXmlOutput(XMLOutput xmlOutput) { this.xmlOutput = xmlOutput; }
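Taken together, the closeOutput edits above encode a simple ownership rule; a sketch of the resulting pair of setters, assuming closeOutput is a boolean field on the tag (only a stream the tag created itself gets closed when the body finishes, a caller-supplied stream is merely flushed):

import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.commons.jelly.XMLOutput;

public class OutputOwningTagSketch {
    private XMLOutput xmlOutput;
    private boolean closeOutput;

    public void setFile(String name) throws IOException {
        // the tag opened this stream, so it is responsible for closing it
        setXmlOutput(XMLOutput.createXMLOutput(new FileOutputStream(name)));
        this.closeOutput = true;
    }

    public void setXmlOutput(XMLOutput xmlOutput) {
        // stream supplied from outside: flush it at the end, but never close it
        this.xmlOutput = xmlOutput;
        this.closeOutput = false;
    }
}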
DataSource src = connDesc.getDataSource();
public void upgradeDatabase() { fireStatusChangeEvent( new SchemaUpdateEvent( PHASE_ALTERING_SCHEMA, 0 ) ); int oldVersion = db.getSchemaVersion(); // Find needed information for DdlUtils ConnectionRepository cr = MetadataManager.getInstance().connectionRepository(); PBKey connKey = cr.getStandardPBKeyForJcdAlias( "pv" ); JdbcConnectionDescriptor connDesc = cr.getDescriptor( connKey ); DataSource src = connDesc.getDataSource(); String jdbcDriver = connDesc.getDriver(); Platform platform = null; if ( jdbcDriver.equals( "org.apache.derby.jdbc.EmbeddedDriver" ) ) { platform = PlatformFactory.createNewPlatformInstance( "derby" ); } else if ( jdbcDriver.equals( "com.mysql.jdbc.Driver" ) ){ platform = PlatformFactory.createNewPlatformInstance( "mysql" ); } platform.getPlatformInfo().setDelimiterToken( "" ); // Get the database schema XML file InputStream schemaIS = getClass().getClassLoader().getResourceAsStream( "photovault_schema.xml" ); Database dbModel = new DatabaseIO().read( new InputStreamReader( schemaIS ) ); // Alter tables to match current schema PersistenceBroker broker = PersistenceBrokerFactory.createPersistenceBroker( connKey ); broker.beginTransaction(); try { Connection con = broker.serviceConnectionManager().getConnection(); /* TODO: Derby alter table statements created by DdlUtils have wrong syntax. Luckily we do not need to do such modifications for now. There is an error report for DdlUtils (http://issues.apache.org/jira/browse/DDLUTILS-53), after it has been corrected the alterColumns flag should be set to true. */ System.out.println( platform.getAlterTablesSql( con, dbModel, false, true, true ) ); platform.alterTables( con, dbModel, false, true, true ); } catch (DynaSqlException ex) { ex.printStackTrace(); } catch ( LookupException ex ) { ex.printStackTrace(); } broker.commitTransaction(); broker.close(); if ( oldVersion < 4 ) { // In older versions the hash column was not included in the schema so we must fill it. createHashes(); } DbInfo info = DbInfo.getDbInfo(); info.setVersion( db.CURRENT_SCHEMA_VERSION ); fireStatusChangeEvent( new SchemaUpdateEvent( PHASE_COMPLETE, 100 ) ); }
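The same DdlUtils calls used in upgradeDatabase above, pulled out into a standalone sketch that skips the OJB broker plumbing; the schema file name, JDBC URL, and DdlUtils package names are assumptions here, while the method signatures are the ones already used above:

import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import org.apache.ddlutils.Platform;
import org.apache.ddlutils.PlatformFactory;
import org.apache.ddlutils.io.DatabaseIO;
import org.apache.ddlutils.model.Database;

public class SchemaUpgradeSketch {
    public static void main(String[] args) throws Exception {
        // Read the target schema model from XML (hypothetical local file)
        Database dbModel = new DatabaseIO().read(new FileReader("photovault_schema.xml"));
        Platform platform = PlatformFactory.createNewPlatformInstance("derby");
        platform.getPlatformInfo().setDelimiterToken("");
        // Hypothetical embedded Derby database
        Connection con = DriverManager.getConnection("jdbc:derby:photovault;create=true");
        // Print the generated ALTER statements, then apply them with the same flags as above
        System.out.println(platform.getAlterTablesSql(con, dbModel, false, true, true));
        platform.alterTables(con, dbModel, false, true, true);
        con.close();
    }
}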
if (Options.getAssocTest() == ASSOC_TRIO){ results[k][z].setTransCount(theEM.getTransCount(z)); results[k][z].setUntransCount(theEM.getUntransCount(z)); }else if (Options.getAssocTest() == ASSOC_CC){ results[k][z].setCaseFreq(theEM.getCaseFreq(z)); results[k][z].setControlFreq(theEM.getControlFreq(z)); }
Haplotype[][] generateHaplotypes(Vector blocks, int hapthresh, boolean crossover) throws HaploViewException{ //TODO: output indiv hap estimates Haplotype[][] results = new Haplotype[blocks.size()][]; //String raw = new String(); //String currentLine; this.totalBlocks = blocks.size(); this.blocksDone = 0; for (int k = 0; k < blocks.size(); k++){ this.blocksDone++; int[] preFiltBlock = (int[])blocks.elementAt(k); int[] theBlock; int[] selectedMarkers = new int[0]; int[] equivClass = new int[0]; if (preFiltBlock.length > 30){ equivClass = new int[preFiltBlock.length]; int classCounter = 0; for (int x = 0; x < preFiltBlock.length; x++){ int marker1 = preFiltBlock[x]; //already been lumped into an equivalency class if (equivClass[x] != 0){ continue; } //start a new equivalency class for this SNP classCounter ++; equivClass[x] = classCounter; for (int y = x+1; y < preFiltBlock.length; y++){ int marker2 = preFiltBlock[y]; if (marker1 > marker2){ int tmp = marker1; marker1 = marker2; marker2 = tmp; } if ( dpTable.getLDStats(marker1,marker2).getRSquared() == 1.0){ //these two SNPs are redundant equivClass[y] = classCounter; } } } //parse equivalency classes selectedMarkers = new int[classCounter]; for (int x = 0; x < selectedMarkers.length; x++){ selectedMarkers[x] = -1; } for (int x = 0; x < classCounter; x++){ double genoPC = 1.0; for (int y = 0; y < equivClass.length; y++){ if (equivClass[y] == x+1){ if (percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]] <= genoPC){ selectedMarkers[x] = preFiltBlock[y]; genoPC = percentBadGenotypes[Chromosome.realIndex[preFiltBlock[y]]]; } } } } theBlock = selectedMarkers; }else{ theBlock = preFiltBlock; } //kirby patch EM theEM = new EM(chromosomes,numTrios); theEM.doEM(theBlock); int p = 0; Haplotype[] tempArray = new Haplotype[theEM.numHaplos()]; int[][] returnedHaplos = theEM.getHaplotypes(); double[] returnedFreqs = theEM.getFrequencies(); for (int i = 0; i < theEM.numHaplos(); i++){ int[] genos = new int[returnedHaplos[i].length]; for (int j = 0; j < genos.length; j++){ if (returnedHaplos[i][j] == 1){ genos[j] = Chromosome.getMarker(theBlock[j]).getMajor(); }else{ if (Chromosome.getMarker(theBlock[j]).getMinor() == 0){ genos[j] = 8; }else{ genos[j] = Chromosome.getMarker(theBlock[j]).getMinor(); } } } if (selectedMarkers.length > 0){ //we need to reassemble the haplotypes Hashtable hapsHash = new Hashtable(); //add to hash all the genotypes we phased for (int q = 0; q < genos.length; q++){ hapsHash.put(new Integer(theBlock[q]), new Integer(genos[q])); } //now add all the genotypes we didn't bother phasing, based on //which marker they are identical to for (int q = 0; q < equivClass.length; q++){ int currentClass = equivClass[q]-1; if (selectedMarkers[currentClass] == preFiltBlock[q]){ //we alredy added the phased genotypes above continue; } int indexIntoBlock=0; for (int x = 0; x < theBlock.length; x++){ if (theBlock[x] == selectedMarkers[currentClass]){ indexIntoBlock = x; break; } } //this (somewhat laboriously) reconstructs whether to add the minor or major allele //for markers with MAF close to 0.50 we can't use major/minor alleles to match //'em up 'cause these might change given missing data if (Chromosome.getMarker(selectedMarkers[currentClass]).getMAF() > 0.4){ for (int z = 0; z < chromosomes.size(); z++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(z); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++z); int theGeno = thisChrom.getGenotype(selectedMarkers[currentClass]); int nextGeno = 
nextChrom.getGenotype(selectedMarkers[currentClass]); if (theGeno == nextGeno && theGeno == genos[indexIntoBlock] && thisChrom.getGenotype(preFiltBlock[q]) != 0){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(thisChrom.getGenotype(preFiltBlock[q]))); } } }else{ if (Chromosome.getMarker(selectedMarkers[currentClass]).getMajor() == genos[indexIntoBlock]){ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMajor())); }else{ hapsHash.put(new Integer(preFiltBlock[q]), new Integer(Chromosome.getMarker(preFiltBlock[q]).getMinor())); } } } genos = new int[preFiltBlock.length]; for (int q = 0; q < preFiltBlock.length; q++){ genos[q] = ((Integer)hapsHash.get(new Integer(preFiltBlock[q]))).intValue(); } } double tempPerc = returnedFreqs[i]; if (tempPerc*100 > hapthresh){ tempArray[p] = new Haplotype(genos, tempPerc, preFiltBlock); p++; } } //make the results array only large enough to hold haps //which pass threshold above results[k] = new Haplotype[p]; for (int z = 0; z < p; z++){ results[k][z] = tempArray[z]; if (Options.getAssocTest() == ASSOC_TRIO){ results[k][z].setTransCount(theEM.getTransCount(z)); results[k][z].setUntransCount(theEM.getUntransCount(z)); }else if (Options.getAssocTest() == ASSOC_CC){ results[k][z].setCaseFreq(theEM.getCaseFreq(z)); results[k][z].setControlFreq(theEM.getControlFreq(z)); } } } if (!crossover){ haplotypes = results; } return results; }
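generateHaplotypes above collapses blocks longer than 30 markers by lumping markers with pairwise r-squared of 1.0 into equivalence classes and phasing only one representative per class; a standalone illustration of that class assignment, with a hypothetical redundancy matrix standing in for dpTable.getLDStats(m1,m2).getRSquared() == 1.0:

import java.util.Arrays;

public class EquivClassSketch {
    public static void main(String[] args) {
        // redundant[x][y] == true means markers x and y carry identical information (r^2 == 1.0)
        boolean[][] redundant = {
            {true,  true,  false, false},
            {true,  true,  false, false},
            {false, false, true,  true },
            {false, false, true,  true }
        };
        int[] equivClass = new int[redundant.length];
        int classCounter = 0;
        for (int x = 0; x < equivClass.length; x++) {
            if (equivClass[x] != 0) continue;   // already lumped into an earlier class
            classCounter++;
            equivClass[x] = classCounter;
            for (int y = x + 1; y < equivClass.length; y++) {
                if (redundant[x][y]) equivClass[y] = classCounter;
            }
        }
        System.out.println(Arrays.toString(equivClass));   // prints [1, 1, 2, 2]
    }
}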
while (test.evaluateAsBoolean(getContext())) {
try { while (test.evaluateAsBoolean(getContext())) { if (log.isDebugEnabled()) { log.debug("evaluated to true! gonna keep on chuggin!"); } invokeBody(output); } } catch (BreakException e) {
public void doTag(XMLOutput output) throws MissingAttributeException, Exception { if (test != null) { while (test.evaluateAsBoolean(getContext())) { if (log.isDebugEnabled()) { log.debug("evaluated to true! gonna keep on chuggin!"); } invokeBody(output); } } else { throw new MissingAttributeException("test"); } }
log.debug("evaluated to true! gonna keep on chuggin!");
log.debug("loop terminated by break: " + e, e);
public void doTag(XMLOutput output) throws MissingAttributeException, Exception { if (test != null) { while (test.evaluateAsBoolean(getContext())) { if (log.isDebugEnabled()) { log.debug("evaluated to true! gonna keep on chuggin!"); } invokeBody(output); } } else { throw new MissingAttributeException("test"); } }
invokeBody(output);
public void doTag(XMLOutput output) throws MissingAttributeException, Exception { if (test != null) { while (test.evaluateAsBoolean(getContext())) { if (log.isDebugEnabled()) { log.debug("evaluated to true! gonna keep on chuggin!"); } invokeBody(output); } } else { throw new MissingAttributeException("test"); } }
this (pp, pp.findTablePane(model.getPkTable()), pp.findTablePane(model.getFkTable()));
this.pp = pp; this.model = model; setPkTable(pp.findTablePane(model.getPkTable())); setFkTable(pp.findTablePane(model.getFkTable())); setup();
public Relationship(PlayPen pp, SQLRelationship model) throws ArchitectException { this (pp, pp.findTablePane(model.getPkTable()), pp.findTablePane(model.getFkTable())); }
dPrimeDisplay.setVisible(false); dPrimeDisplay.setVisible(true);
public void stateChanged(ChangeEvent e) { viewMenuItems[tabs.getSelectedIndex()].setSelected(true); if (checkPanel != null && checkPanel.changed){ window.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); JTable table = checkPanel.getTable(); boolean[] markerResults = new boolean[table.getRowCount()]; for (int i = 0; i < table.getRowCount(); i++){ markerResults[i] = ((Boolean)table.getValueAt(i,7)).booleanValue(); } int count = 0; for (int i = 0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ count++; } } Chromosome.realIndex = new int[count]; int k = 0; for (int i =0; i < Chromosome.getSize(); i++){ if (markerResults[i]){ Chromosome.realIndex[k] = i; k++; } } theData.filteredDPrimeTable = theData.getFilteredTable(); theData.guessBlocks(currentBlockDef); //hack-y way to refresh the image dPrimeDisplay.setVisible(false); dPrimeDisplay.setVisible(true); hapDisplay.theData = theData; try{ hapDisplay.getHaps(); }catch(HaploViewException hv){ JOptionPane.showMessageDialog(window, hv.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } tdtPanel.refreshTable(); //System.out.println(tabs.getComponentAt(VIEW_TDT_NUM)); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); checkPanel.changed=false; } }
}else if (command == "Zoomed"){ if (dPrimeDisplay != null){ dPrimeDisplay.zoom(0); } }else if (command == "Medium"){ if (dPrimeDisplay != null){ dPrimeDisplay.zoom(1); } }else if (command == "Unzoomed"){ if (dPrimeDisplay != null){ dPrimeDisplay.zoom(2); }
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == CLEAR_BLOCKS){ theData.guessBlocks(3); dPrimeDisplay.refresh(); try{ hapDisplay.getHaps(); }catch(HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } hapScroller.setViewportView(hapDisplay); }else if (command == DEFINE_BLOCKS){ try { defineBlocks(); }catch(HaploViewException hve) { JOptionPane.showMessageDialog(this, hve.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
affSingletons.add(new Integer(thisChrom.getAffected()));
if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); }
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < 
block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(thisChrom.getAffected())); } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); affTrios.add(new Integer(thisChrom.getAffected())); }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(0)); } } } int trioCount = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); full_em_breakup(input_haplos, block_size, trioCount, affTrios); }
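The "base-8 arithmetic" at the top of doEM above splits long blocks into sub-blocks of at most eight markers, folding any remainder into the last two sub-blocks; the same rule as a standalone method, with a few worked examples:

import java.util.Arrays;

public class BlockSizeSketch {
    static int[] split(int length) {
        if (length < 9) {
            return new int[] { length };
        }
        int ones = length % 8;
        int eights = (length - ones) / 8;
        if (ones == 0) {
            int[] sizes = new int[eights];
            Arrays.fill(sizes, 8);
            return sizes;
        }
        int[] sizes = new int[eights + 1];
        for (int i = 0; i < eights - 1; i++) {
            sizes[i] = 8;
        }
        // the 8 + ones leftover markers are shared between the last two sub-blocks
        sizes[eights - 1] = (8 + ones) / 2;
        sizes[eights] = 8 + ones - sizes[eights - 1];
        return sizes;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(split(7)));    // [7]
        System.out.println(Arrays.toString(split(13)));   // [6, 7]
        System.out.println(Arrays.toString(split(16)));   // [8, 8]
        System.out.println(Arrays.toString(split(20)));   // [8, 6, 6]
    }
}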
affTrios.add(new Integer(thisChrom.getAffected()));
if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); }
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < 
block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(thisChrom.getAffected())); } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); affTrios.add(new Integer(thisChrom.getAffected())); }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(0)); } } } int trioCount = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); full_em_breakup(input_haplos, block_size, trioCount, affTrios); }
affSingletons.add(new Integer(0));
if(addAff) { affSingletons.add(new Integer(0)); } } if(addAff) { addAff = false; } else { addAff =true;
public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < 
block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ thisHap[j] = '0'; } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(thisChrom.getAffected())); } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); affTrios.add(new Integer(thisChrom.getAffected())); }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); affSingletons.add(new Integer(0)); } } } int trioCount = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); full_em_breakup(input_haplos, block_size, trioCount, affTrios); }
retval=false;
retval=true;
boolean kid_consistent(int chap1, int chap2, int num_blocks, int[] block_size, int[][] hlist, int[] num_hlist, int this_trio, int num_loci) { int i, val; boolean retval; int[] temp1 = decode_haplo_str(chap1,num_blocks,block_size,hlist,num_hlist); int[] temp2 = decode_haplo_str(chap2,num_blocks,block_size,hlist,num_hlist); retval=false; for (i=0; i<num_loci; i++) { if (ambighet[this_trio][i] == 0) { //TODO:ask mark if this if statement should break out this way //is this what this method should be doing? if (temp1[i] == temp2[i]) { retval=true; break; } } } return(retval); }
if (ambighet[this_trio][i] == 0) {
if (ambighet[this_trio][i] !=0) {
boolean kid_consistent(int chap1, int chap2, int num_blocks, int[] block_size, int[][] hlist, int[] num_hlist, int this_trio, int num_loci) { int i, val; boolean retval; int[] temp1 = decode_haplo_str(chap1,num_blocks,block_size,hlist,num_hlist); int[] temp2 = decode_haplo_str(chap2,num_blocks,block_size,hlist,num_hlist); retval=false; for (i=0; i<num_loci; i++) { if (ambighet[this_trio][i] == 0) { //TODO:ask mark if this if statement should break out this way //is this what this method should be doing? if (temp1[i] == temp2[i]) { retval=true; break; } } } return(retval); }
retval=true;
retval=false;
boolean kid_consistent(int chap1, int chap2, int num_blocks, int[] block_size, int[][] hlist, int[] num_hlist, int this_trio, int num_loci) { int i, val; boolean retval; int[] temp1 = decode_haplo_str(chap1,num_blocks,block_size,hlist,num_hlist); int[] temp2 = decode_haplo_str(chap2,num_blocks,block_size,hlist,num_hlist); retval=false; for (i=0; i<num_loci; i++) { if (ambighet[this_trio][i] == 0) { //TODO:ask mark if this if statement should break out this way //is this what this method should be doing? if (temp1[i] == temp2[i]) { retval=true; break; } } } return(retval); }
Vector blockResults = new AssociationTestSet(blockHaps, names).getResults();
Vector blockResults = new AssociationTestSet(blockHaps, names, alleles).getResults();
public void runFileTests(HaploData theData, Vector inputSNPResults) throws HaploViewException { Vector res = new Vector(); if(tests == null || theData == null) { return; } Vector blocks = new Vector(); Vector names = new Vector(); for(int i=0;i<tests.size();i++) { //first go through and get all the multimarker tests to package up to hand to theData.generateHaplotypes() AssociationTest currentTest = (AssociationTest) tests.get(i); if(currentTest.getNumMarkers() > 1) { blocks.add(currentTest.getFilteredMarkerArray()); names.add(currentTest.getName()); } } Haplotype[][] blockHaps = theData.generateHaplotypes(blocks, true); Vector blockResults = new AssociationTestSet(blockHaps, names).getResults(); Iterator britr = blockResults.iterator(); for (int i = 0; i < tests.size(); i++){ AssociationTest currentTest = (AssociationTest) tests.get(i); if(currentTest.getNumMarkers() > 1) { //grab the next block result from above //check to see if a specific allele was given HaplotypeAssociationResult har = (HaplotypeAssociationResult) britr.next(); //todo: this is borken. needs count of all other alleles. if (currentTest.getAllele() != null){ boolean foundAllele = false; for (int j = 0; j < har.getAlleleCount(); j++){ if (har.getNumericAlleleName(j).equals(currentTest.getAllele())){ Haplotype[] filtHaps = {har.getHaps()[j]}; res.add(new HaplotypeAssociationResult(filtHaps,0,har.getName())); foundAllele = true; break; } } if (!foundAllele){ throw new HaploViewException(currentTest.getAllele() + ": no such allele for test:\n" + har.getName()); } }else{ res.add(har); } }else if (currentTest.getNumMarkers() == 1){ //grab appropriate single marker result. res.add(inputSNPResults.get(currentTest.getMarkerArray()[0])); } } results = res; }
if (currentTest.getAllele() != null){ boolean foundAllele = false; for (int j = 0; j < har.getAlleleCount(); j++){ if (har.getNumericAlleleName(j).equals(currentTest.getAllele())){ Haplotype[] filtHaps = {har.getHaps()[j]}; res.add(new HaplotypeAssociationResult(filtHaps,0,har.getName())); foundAllele = true; break; } } if (!foundAllele){ throw new HaploViewException(currentTest.getAllele() + ": no such allele for test:\n" + har.getName()); } }else{ res.add(har); }
res.add(har);
public void runFileTests(HaploData theData, Vector inputSNPResults) throws HaploViewException { Vector res = new Vector(); if(tests == null || theData == null) { return; } Vector blocks = new Vector(); Vector names = new Vector(); for(int i=0;i<tests.size();i++) { //first go through and get all the multimarker tests to package up to hand to theData.generateHaplotypes() AssociationTest currentTest = (AssociationTest) tests.get(i); if(currentTest.getNumMarkers() > 1) { blocks.add(currentTest.getFilteredMarkerArray()); names.add(currentTest.getName()); } } Haplotype[][] blockHaps = theData.generateHaplotypes(blocks, true); Vector blockResults = new AssociationTestSet(blockHaps, names).getResults(); Iterator britr = blockResults.iterator(); for (int i = 0; i < tests.size(); i++){ AssociationTest currentTest = (AssociationTest) tests.get(i); if(currentTest.getNumMarkers() > 1) { //grab the next block result from above //check to see if a specific allele was given HaplotypeAssociationResult har = (HaplotypeAssociationResult) britr.next(); //todo: this is borken. needs count of all other alleles. if (currentTest.getAllele() != null){ boolean foundAllele = false; for (int j = 0; j < har.getAlleleCount(); j++){ if (har.getNumericAlleleName(j).equals(currentTest.getAllele())){ Haplotype[] filtHaps = {har.getHaps()[j]}; res.add(new HaplotypeAssociationResult(filtHaps,0,har.getName())); foundAllele = true; break; } } if (!foundAllele){ throw new HaploViewException(currentTest.getAllele() + ": no such allele for test:\n" + har.getName()); } }else{ res.add(har); } }else if (currentTest.getNumMarkers() == 1){ //grab appropriate single marker result. res.add(inputSNPResults.get(currentTest.getMarkerArray()[0])); } } results = res; }
public void doTag(XMLOutput output) throws Exception {
public void doTag(XMLOutput output) {
public void doTag(XMLOutput output) throws Exception { }
tempVect.add(new Integer(i+1));
tempVect.add(new Integer(Chromosome.realIndex[i]+1));
public void refreshTable(){ this.removeAll(); Vector tableData = new Vector(); int numRes = Chromosome.getFilteredSize(); for (int i = 0; i < numRes; i++){ Vector tempVect = new Vector(); TDTResult currentResult = (TDTResult)result.get(Chromosome.realIndex[i]); tempVect.add(new Integer(i+1)); tempVect.add(currentResult.getName()); tempVect.add(currentResult.getOverTransmittedAllele(type)); tempVect.add(currentResult.getTURatio(type)); tempVect.add(new Double(currentResult.getChiSq(type))); tempVect.add(currentResult.getPValue()); tableData.add(tempVect.clone()); } table = new JTable(tableData,tableColumnNames); table.getColumnModel().getColumn(0).setPreferredWidth(50); table.getColumnModel().getColumn(1).setPreferredWidth(100); if (type != 1){ table.getColumnModel().getColumn(3).setPreferredWidth(160); } table.getColumnModel().getColumn(2).setPreferredWidth(100); JScrollPane tableScroller = new JScrollPane(table); add(tableScroller); }
set( name, value );
name = convertVariableName(name); if (name != null) { set( name, value ); }
public void setJellyContext(JellyContext context) throws EvalError { this.context = context; // now pass in all the variables for ( Iterator iter = context.getVariableNames(); iter.hasNext(); ) { String name = (String) iter.next(); Object value = context.getVariable(name); set( name, value ); } // lets pass in the Jelly context set( "jellyContext", context ); }
set( "jellyContext", context );
set( "context", context );
public void setJellyContext(JellyContext context) throws EvalError { this.context = context; // now pass in all the variables for ( Iterator iter = context.getVariableNames(); iter.hasNext(); ) { String name = (String) iter.next(); Object value = context.getVariable(name); set( name, value ); } // lets pass in the Jelly context set( "jellyContext", context ); }
if(!AuthConstants.USER_ADMIN.equals(user.getUsername())) userForm.setRole(((Role)user.getRoles().get(0)).getName());
if(!AuthConstants.USER_ADMIN.equals(user.getUsername())){ String[] roles = new String[user.getRoles().size()]; int ctr = 0; for(Iterator it= user.getRoles().iterator(); it.hasNext();){ roles[ctr++] = ((Role)it.next()).getName(); } userForm.setRole(roles); }
private void prepareUserForm(ActionForm form, User user){ UserForm userForm = (UserForm)form; userForm.setUsername(user.getUsername()); userForm.setPassword(UserForm.FORM_PASSWORD); userForm.setConfirmPassword(UserForm.FORM_PASSWORD); //TODO Need to handle multiple role scenario if(!AuthConstants.USER_ADMIN.equals(user.getUsername())) userForm.setRole(((Role)user.getRoles().get(0)).getName()); userForm.setStatus(user.getStatus()); }
"One or more males in the file is heterozygous.\nThese genotypes have been ignored.",
"At least one male in the file is heterozygous.\nThese genotypes have been ignored.",
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = custom association test list file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); final AssociationTestSet customAssocSet; try { if (inputOptions[2] != null && inputOptions[1] == null){ throw new HaploViewException("A marker information file is required if a tests file is specified."); } if (inputOptions[1] == null && Options.getAssocTest() != ASSOC_NONE){ throw new HaploViewException("A marker information file is required for association tests."); } this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS_FILE){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_ASSOC_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); if (type == HAPS_FILE){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } if(theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } if(theData.getPedFile().isHaploidHets()) { JOptionPane.showMessageDialog(this, "One or more males in the file is heterozygous.\nThese genotypes have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } //turn on/off gbrowse menu if (Options.isGBrowseShown()){ gbEditItem.setEnabled(true); }else{ gbEditItem.setEnabled(false); } if (type == HAPS_FILE){ readMarkers(markerFile, null); HashSet emptyHashSet = new HashSet(); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); customAssocSet = null; theData.getPedFile().setWhiteList(emptyHashSet); checkPanel = new CheckDataPanel(this); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); //we read the file in first, so we can whitelist all the markers in the custom test set HashSet whiteListedCustomMarkers = new HashSet(); if (inputOptions[2] != null){ customAssocSet = new AssociationTestSet(inputOptions[2]); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } theData.getPedFile().setWhiteList(whiteListedCustomMarkers); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(checkPanel.getMarkerResults()); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ Container contents = getContentPane(); contents.removeAll(); contents.repaint(); defaultLayout = contents.getLayout(); contents.setLayout(new GridBagLayout()); haploProgress = new JProgressBar(0,2); haploProgress.setValue(0); haploProgress.setStringPainted(true); haploProgress.setForeground(new Color(40,40,255)); haploProgress.setPreferredSize(new Dimension(250,20)); progressPanel.setLayout(new BoxLayout(progressPanel,BoxLayout.Y_AXIS)); JLabel progressLabel = new JLabel("Loading data..."); progressPanel.add(progressLabel); progressLabel.setAlignmentX(CENTER_ALIGNMENT); progressPanel.add(haploProgress); contents.add(progressPanel); progressPanel.revalidate(); for (int i = 0; i < viewMenuItems.length; i++){ viewMenuItems[i].setEnabled(false); } dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; contents = getContentPane(); contents.removeAll(); tabs = new HaploviewTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); HaploviewTab ldTab = new HaploviewTab(dPrimeDisplay); ldTab.add(dPrimeScroller); tabs.addTab(VIEW_DPRIME, ldTab); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); HaploviewTab hapsTab = new HaploviewTab(hapDisplay); hapsTab.add(hapScroller); hapsTab.add(hdc); tabs.addTab(VIEW_HAPLOTYPES, hapsTab); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //check data panel HaploviewTab checkTab = new HaploviewTab(checkPanel); checkTab.add(checkPanel); CheckDataController cdc = new CheckDataController(checkPanel); checkTab.add(cdc); tabs.addTab(VIEW_CHECK_PANEL, checkTab); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); tabs.setSelectedComponent(checkTab); //only show tagger if we have a .info file if (theData.infoKnown){ //tagger display taggerConfigPanel = new TaggerConfigPanel(theData); HaploviewTabbedPane tagTabs = new HaploviewTabbedPane(); tagTabs.add("Configuration",taggerConfigPanel); taggerResultsPanel = new TaggerResultsPanel(); taggerConfigPanel.addActionListener(taggerResultsPanel); tagTabs.addTab("Results",taggerResultsPanel); HaploviewTab taggerTab = new HaploviewTab(tagTabs); taggerTab.add(tagTabs); tabs.addTab(VIEW_TAGGER,taggerTab); viewMenuItems[VIEW_TAGGER_NUM].setEnabled(true); } //Association panel if(Options.getAssocTest() != ASSOC_NONE) { HaploviewTabbedPane metaAssoc = new HaploviewTabbedPane(); try{ tdtPanel = new TDTPanel(new AssociationTestSet(theData.getPedFile(), 
null,null, Chromosome.getAllMarkers())); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ hapAssocPanel = new HaploAssocPanel(new AssociationTestSet(theData.getHaplotypes(), null)); }else{ hapAssocPanel = new HaploAssocPanel(new AssociationTestSet(null,null)); } metaAssoc.add("Haplotypes", hapAssocPanel); //custom association tests custAssocPanel = null; if(customAssocSet != null) { try { customAssocSet.runFileTests(theData, tdtPanel.getTestSet().getMarkerAssociationResults()); custAssocPanel = new CustomAssocPanel(customAssocSet); metaAssoc.addTab("Custom",custAssocPanel); metaAssoc.setSelectedComponent(custAssocPanel); } catch (HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } AssociationTestSet custPermSet = null; if (custAssocPanel != null){ custPermSet = custAssocPanel.getTestSet(); } AssociationTestSet permSet = new AssociationTestSet(); permSet.cat(tdtPanel.getTestSet()); permSet.cat(hapAssocPanel.getTestSet()); permutationPanel = new PermutationTestPanel( new PermutationTestSet(0,theData.getPedFile(),custPermSet, permSet)); metaAssoc.add(permutationPanel,"Permutation Tests"); HaploviewTab associationTab = new HaploviewTab(metaAssoc); associationTab.add(metaAssoc); tabs.addTab(VIEW_ASSOC, associationTab); viewMenuItems[VIEW_ASSOC_NUM].setEnabled(true); } contents.remove(progressPanel); contents.setLayout(defaultLayout); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (isMaxSet == true){ haploProgress.setValue(theData.dPrimeCount); } if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); progressPanel.removeAll(); isMaxSet = false; theData.dPrimeCount = 0; theData.dPrimeTotalCount = -1; setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } if (theData.dPrimeTotalCount != -1 && isMaxSet == false){ haploProgress.setMaximum(theData.dPrimeTotalCount); isMaxSet = true; } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
throw new RuntimeException( "foo" );
public AntTagLibrary() { this.project = new Project(); BuildLogger logger = new NoBannerLogger(); logger.setMessageOutputLevel( org.apache.tools.ant.Project.MSG_INFO ); logger.setOutputPrintStream( System.out ); logger.setErrorPrintStream( System.err); project.addBuildListener( logger ); project.init(); throw new RuntimeException( "foo" ); }
return tagFactory.createTag();
return tagFactory.createTag(localName, getSaxAttributes());
protected Tag createTag() throws Exception { if ( tagFactory != null) { return tagFactory.createTag(); } return null; }
logger.debug("Changing nullable "+oldNullable+" -> "+argNullable);
public void setNullable(int argNullable) { int oldNullable = this.nullable; if (this.nullable != argNullable) { this.nullable = argNullable; fireDbObjectChanged("nullable",oldNullable,argNullable); } }
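// Hedged sketch (not from the original source): setNullable() above guards the event so a
// property change is only fired when the stored value actually changes, and the added debug
// line logs the old -> new transition. The class below shows the same guard-then-fire pattern
// using the standard java.beans API; DemoColumn is a hypothetical stand-in, not the real class.
import java.beans.PropertyChangeSupport;

class DemoColumn {
    private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
    private int nullable;

    public void setNullable(int argNullable) {
        int oldNullable = this.nullable;
        if (oldNullable != argNullable) {          // fire only on a real change
            this.nullable = argNullable;
            pcs.firePropertyChange("nullable", oldNullable, argNullable);
        }
    }

    public void addListener(java.beans.PropertyChangeListener l) {
        pcs.addPropertyChangeListener(l);
    }
}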
SQLObject p = parent;
this.primaryKeySeq = argPrimaryKeySeq; fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); SQLObject p = parent;
public void setPrimaryKeySeq(Integer argPrimaryKeySeq) { // do nothing if there's no change if ( (primaryKeySeq == null && argPrimaryKeySeq == null) || (primaryKeySeq != null && primaryKeySeq.equals(argPrimaryKeySeq)) ) { return; } try { startCompoundEdit("Starting PrimaryKeySeq compound edit"); Integer oldPrimaryKeySeq = primaryKeySeq; if (argPrimaryKeySeq != null && !this.autoIncrement) { setNullable(DatabaseMetaData.columnNoNulls); } SQLObject p = parent; if (p != null) { p.removeChild(this); } this.primaryKeySeq = argPrimaryKeySeq; if (p != null) { int idx = 0; int targetPKS = primaryKeySeq == null ? Integer.MAX_VALUE : primaryKeySeq.intValue(); logger.debug("Parent = "+p); logger.debug("Parent.children = "+p.children); for (SQLColumn col : (List<SQLColumn>) p.children) { if (col.getPrimaryKeySeq() == null || col.getPrimaryKeySeq() > targetPKS) { logger.debug("idx is " + idx); break; } idx++; } p.addChild(idx, this); getParentTable().normalizePrimaryKey(); } fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); } catch (ArchitectException e) { throw new ArchitectRuntimeException(e); } finally { endCompoundEdit("Ending PrimaryKeySeq compound edit"); } }
} this.primaryKeySeq = argPrimaryKeySeq; if (p != null) {
public void setPrimaryKeySeq(Integer argPrimaryKeySeq) { // do nothing if there's no change if ( (primaryKeySeq == null && argPrimaryKeySeq == null) || (primaryKeySeq != null && primaryKeySeq.equals(argPrimaryKeySeq)) ) { return; } try { startCompoundEdit("Starting PrimaryKeySeq compound edit"); Integer oldPrimaryKeySeq = primaryKeySeq; if (argPrimaryKeySeq != null && !this.autoIncrement) { setNullable(DatabaseMetaData.columnNoNulls); } SQLObject p = parent; if (p != null) { p.removeChild(this); } this.primaryKeySeq = argPrimaryKeySeq; if (p != null) { int idx = 0; int targetPKS = primaryKeySeq == null ? Integer.MAX_VALUE : primaryKeySeq.intValue(); logger.debug("Parent = "+p); logger.debug("Parent.children = "+p.children); for (SQLColumn col : (List<SQLColumn>) p.children) { if (col.getPrimaryKeySeq() == null || col.getPrimaryKeySeq() > targetPKS) { logger.debug("idx is " + idx); break; } idx++; } p.addChild(idx, this); getParentTable().normalizePrimaryKey(); } fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); } catch (ArchitectException e) { throw new ArchitectRuntimeException(e); } finally { endCompoundEdit("Ending PrimaryKeySeq compound edit"); } }
} fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq);
}
public void setPrimaryKeySeq(Integer argPrimaryKeySeq) { // do nothing if there's no change if ( (primaryKeySeq == null && argPrimaryKeySeq == null) || (primaryKeySeq != null && primaryKeySeq.equals(argPrimaryKeySeq)) ) { return; } try { startCompoundEdit("Starting PrimaryKeySeq compound edit"); Integer oldPrimaryKeySeq = primaryKeySeq; if (argPrimaryKeySeq != null && !this.autoIncrement) { setNullable(DatabaseMetaData.columnNoNulls); } SQLObject p = parent; if (p != null) { p.removeChild(this); } this.primaryKeySeq = argPrimaryKeySeq; if (p != null) { int idx = 0; int targetPKS = primaryKeySeq == null ? Integer.MAX_VALUE : primaryKeySeq.intValue(); logger.debug("Parent = "+p); logger.debug("Parent.children = "+p.children); for (SQLColumn col : (List<SQLColumn>) p.children) { if (col.getPrimaryKeySeq() == null || col.getPrimaryKeySeq() > targetPKS) { logger.debug("idx is " + idx); break; } idx++; } p.addChild(idx, this); getParentTable().normalizePrimaryKey(); } fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); } catch (ArchitectException e) { throw new ArchitectRuntimeException(e); } finally { endCompoundEdit("Ending PrimaryKeySeq compound edit"); } }
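// Hedged sketch (not from the original source): setPrimaryKeySeq() above brackets its work in
// startCompoundEdit(...) / endCompoundEdit(...) with try/finally, so the compound edit is closed
// even when removeChild()/addChild() throws. The demo below uses hypothetical no-op begin/end
// methods purely to show that shape; it is not the real Architect API.
class CompoundEditDemo {
    private int depth;

    void beginCompoundEdit(String message) { depth++; System.out.println("begin: " + message); }
    void endCompoundEdit(String message)   { depth--; System.out.println("end:   " + message); }

    void riskyOperation(boolean fail) {
        try {
            beginCompoundEdit("Starting demo compound edit");
            if (fail) {
                throw new IllegalStateException("simulated failure mid-edit");
            }
            System.out.println("work done");
        } finally {
            endCompoundEdit("Ending demo compound edit");   // always runs, keeping depth balanced
        }
    }

    public static void main(String[] args) {
        CompoundEditDemo demo = new CompoundEditDemo();
        try { demo.riskyOperation(true); } catch (IllegalStateException expected) { }
        System.out.println("depth after failure = " + demo.depth);   // 0: the edit was closed
    }
}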
JPanel arsePanel = new JPanel(); arsePanel.setBackground(Color.RED); arsePanel.setPreferredSize(new Dimension(100,100)); arsePanel.setBounds(100,100,200,200); arsePanel.setOpaque(true);
void drawPicture(HaploData theData){ Container contents = getContentPane(); contents.removeAll(); //remember which tab we're in if they've already been set up int currentTabIndex = 0; if (!(tabs == null)){ currentTabIndex = tabs.getSelectedIndex(); } tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(theData.dPrimeTable, infoKnown, theData.markerInfo); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[0], panel); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); hapDisplay = new HaplotypeDisplay(theData); HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); JScrollPane hapScroller = new JScrollPane(hapDisplay); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[1], panel); tabs.setSelectedIndex(currentTabIndex); contents.add(tabs); //next add a little spacer //ontents.add(Box.createRigidArea(new Dimension(0,5))); //and then add the block display //theBlocks = new BlockDisplay(theData.markerInfo, theData.blocks, dPrimeDisplay, infoKnown); //contents.setBackground(Color.black); //put the block display in a scroll pane in case the data set is very large. //JScrollPane blockScroller = new JScrollPane(theBlocks, // JScrollPane.VERTICAL_SCROLLBAR_NEVER, // JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); //blockScroller.getHorizontalScrollBar().setUnitIncrement(60); //blockScroller.setMinimumSize(new Dimension(800, 100)); //contents.add(blockScroller); repaint(); setVisible(true); }
HaploView.setDefaultLookAndFeelDecorated(true);
public static void main(String[] args) {//throws IOException{ if(args.length>0){ handleFlags(args); } else { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (Exception e) { } //setup view object HaploView window = new HaploView(); window.setTitle("HaploView beta"); window.setSize(800,600); //center the window on the screen Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); window.setLocation((screen.width - window.getWidth()) / 2, (screen.height - window.getHeight()) / 2); window.setVisible(true); ReadDataDialog readDialog = new ReadDataDialog("Welcome to HaploView", window); readDialog.pack(); readDialog.setVisible(true); } }
g2.scale(zoom, zoom); FontRenderContext frc = g2.getFontRenderContext();
FontRenderContext frc = null; if (g2 != null) { g2.scale(zoom, zoom); frc = g2.getFontRenderContext(); g2.dispose(); }
public FontRenderContext getFontRenderContext() { Graphics2D g2 = (Graphics2D) getGraphics(); g2.scale(zoom, zoom); FontRenderContext frc = g2.getFontRenderContext(); if (logger.isDebugEnabled()) logger.debug("Returning frc="+frc); return frc; }
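// Hedged sketch (not from the original source): Component.getGraphics() returns null until the
// component is displayable, and a Graphics obtained this way should be disposed when done. The
// helper below mirrors the guarded version shown above; FrcUtil/safeFontRenderContext are
// hypothetical names. Returning the FontRenderContext after dispose() is safe because it is an
// immutable snapshot of the transform and rendering hints.
import java.awt.Component;
import java.awt.Graphics2D;
import java.awt.font.FontRenderContext;

final class FrcUtil {
    static FontRenderContext safeFontRenderContext(Component c, double zoom) {
        Graphics2D g2 = (Graphics2D) c.getGraphics();
        if (g2 == null) {
            return null;                       // component not yet displayable
        }
        try {
            g2.scale(zoom, zoom);
            return g2.getFontRenderContext();
        } finally {
            g2.dispose();                      // release the graphics resource we acquired
        }
    }
}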
sep = Chromosome.getFilteredMarker(y).getPosition() - Chromosome.getFilteredMarker(x).getPosition();
sep = Math.abs(Chromosome.getFilteredMarker(y).getPosition() - Chromosome.getFilteredMarker(x).getPosition());
static Vector doSFS(PairwiseLinkage[][] dPrime){ int numStrong = 0; int numRec = 0; int numInGroup = 0; Vector blocks = new Vector(); Vector strongPairs = new Vector(); //first set up a filter of markers which fail the MAF threshhold boolean[] skipMarker = new boolean[dPrime.length]; for (int x = 0; x < dPrime.length; x++){ if (Chromosome.getFilteredMarker(x).getMAF() < mafThresh){ skipMarker[x]=true; }else{ skipMarker[x]=false; } } //next make a list of marker pairs in "strong LD", sorted by distance apart for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); if (skipMarker[x] || skipMarker[y]) continue; if (lod < -90) continue; //missing data if (highCI < cutHighCI || lowCI < cutLowCI) continue; //must pass "strong LD" test Vector addMe = new Vector(); //a vector of x, y, separation long sep; //compute actual separation sep = Chromosome.getFilteredMarker(y).getPosition() - Chromosome.getFilteredMarker(x).getPosition(); addMe.add(String.valueOf(x)); addMe.add(String.valueOf(y)); addMe.add(String.valueOf(sep)); if (strongPairs.size() == 0){ //put first pair first strongPairs.add(addMe); }else{ //sort by descending separation of markers in each pair boolean unplaced = true; for (int v = 0; v < strongPairs.size(); v ++){ if (sep >= Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2))){ strongPairs.insertElementAt(addMe, v); unplaced = false; break; } } if (unplaced){strongPairs.add(addMe);} } } } //now take this list of pairs with "strong LD" and construct blocks boolean[] usedInBlock = new boolean[dPrime.length + 1]; Vector thisBlock; int[] blockArray; for (int v = 0; v < strongPairs.size(); v++){ numStrong = 0; numRec = 0; numInGroup = 0; thisBlock = new Vector(); int first = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(0)); int last = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(1)); int sep = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2)); //first see if this block overlaps with another: if (usedInBlock[first] || usedInBlock[last]) continue; //next, count the number of markers in the block. for (int x = first; x <=last ; x++){ if(!skipMarker[x]) numInGroup++; } //skip it if it is too long in bases for it's size in markers if (numInGroup < 4 && sep > maxDist[numInGroup]) continue; thisBlock.add(new Integer(first)); //test this block. 
requires 95% of informative markers to be "strong" for (int y = first+1; y <= last; y++){ if (skipMarker[y]) continue; thisBlock.add(new Integer(y)); //loop over columns in row y for (int x = first; x < y; x++){ if (skipMarker[x]) continue; PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); if (lod < -90) continue; //monomorphic marker error if (lod == 0 && lowCI == 0 && highCI == 0) continue; //skip bad markers //for small blocks use different CI cutoffs if (numInGroup < 5){ if (lowCI > cutLowCIVar[numInGroup] && highCI >= cutHighCI) numStrong++; }else{ if (lowCI > cutLowCI && highCI >= cutHighCI) numStrong++; //strong LD } if (highCI < recHighCI) numRec++; //recombination } } //change the definition somewhat for small blocks if (numInGroup > 3){ if (numStrong + numRec < 6) continue; }else if (numInGroup > 2){ if (numStrong + numRec < 3) continue; }else{ if (numStrong + numRec < 1) continue; } blockArray = new int[thisBlock.size()]; for (int z = 0; z < thisBlock.size(); z++){ blockArray[z] = ((Integer)thisBlock.elementAt(z)).intValue(); } // System.out.println(first + " " + last + " " + numStrong + " " + numRec); if ((double)numStrong/(double)(numStrong + numRec) > informFrac){ //this qualifies as a block //add to the block list, but in order by first marker number: if (blocks.size() == 0){ //put first block first blocks.add(blockArray); }else{ //sort by ascending separation of markers in each pair boolean placed = false; for (int b = 0; b < blocks.size(); b ++){ if (first < ((int[])blocks.elementAt(b))[0]){ blocks.insertElementAt(blockArray, b); placed = true; break; } } //make sure to put in blocks which fall on the tail end if (!placed) blocks.add(blockArray); } for (int used = first; used <= last; used++){ usedInBlock[used] = true; } } } return blocks; }
int sep = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2));
int sep = Math.abs(Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2)));
static Vector doSFS(PairwiseLinkage[][] dPrime){ int numStrong = 0; int numRec = 0; int numInGroup = 0; Vector blocks = new Vector(); Vector strongPairs = new Vector(); //first set up a filter of markers which fail the MAF threshhold boolean[] skipMarker = new boolean[dPrime.length]; for (int x = 0; x < dPrime.length; x++){ if (Chromosome.getFilteredMarker(x).getMAF() < mafThresh){ skipMarker[x]=true; }else{ skipMarker[x]=false; } } //next make a list of marker pairs in "strong LD", sorted by distance apart for (int x = 0; x < dPrime.length-1; x++){ for (int y = x+1; y < dPrime.length; y++){ PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); if (skipMarker[x] || skipMarker[y]) continue; if (lod < -90) continue; //missing data if (highCI < cutHighCI || lowCI < cutLowCI) continue; //must pass "strong LD" test Vector addMe = new Vector(); //a vector of x, y, separation long sep; //compute actual separation sep = Chromosome.getFilteredMarker(y).getPosition() - Chromosome.getFilteredMarker(x).getPosition(); addMe.add(String.valueOf(x)); addMe.add(String.valueOf(y)); addMe.add(String.valueOf(sep)); if (strongPairs.size() == 0){ //put first pair first strongPairs.add(addMe); }else{ //sort by descending separation of markers in each pair boolean unplaced = true; for (int v = 0; v < strongPairs.size(); v ++){ if (sep >= Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2))){ strongPairs.insertElementAt(addMe, v); unplaced = false; break; } } if (unplaced){strongPairs.add(addMe);} } } } //now take this list of pairs with "strong LD" and construct blocks boolean[] usedInBlock = new boolean[dPrime.length + 1]; Vector thisBlock; int[] blockArray; for (int v = 0; v < strongPairs.size(); v++){ numStrong = 0; numRec = 0; numInGroup = 0; thisBlock = new Vector(); int first = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(0)); int last = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(1)); int sep = Integer.parseInt((String)((Vector)strongPairs.elementAt(v)).elementAt(2)); //first see if this block overlaps with another: if (usedInBlock[first] || usedInBlock[last]) continue; //next, count the number of markers in the block. for (int x = first; x <=last ; x++){ if(!skipMarker[x]) numInGroup++; } //skip it if it is too long in bases for it's size in markers if (numInGroup < 4 && sep > maxDist[numInGroup]) continue; thisBlock.add(new Integer(first)); //test this block. 
requires 95% of informative markers to be "strong" for (int y = first+1; y <= last; y++){ if (skipMarker[y]) continue; thisBlock.add(new Integer(y)); //loop over columns in row y for (int x = first; x < y; x++){ if (skipMarker[x]) continue; PairwiseLinkage thisPair = dPrime[x][y]; if (thisPair == null){ continue; } //get the right bits double lod = thisPair.getLOD(); double lowCI = thisPair.getConfidenceLow(); double highCI = thisPair.getConfidenceHigh(); if (lod < -90) continue; //monomorphic marker error if (lod == 0 && lowCI == 0 && highCI == 0) continue; //skip bad markers //for small blocks use different CI cutoffs if (numInGroup < 5){ if (lowCI > cutLowCIVar[numInGroup] && highCI >= cutHighCI) numStrong++; }else{ if (lowCI > cutLowCI && highCI >= cutHighCI) numStrong++; //strong LD } if (highCI < recHighCI) numRec++; //recombination } } //change the definition somewhat for small blocks if (numInGroup > 3){ if (numStrong + numRec < 6) continue; }else if (numInGroup > 2){ if (numStrong + numRec < 3) continue; }else{ if (numStrong + numRec < 1) continue; } blockArray = new int[thisBlock.size()]; for (int z = 0; z < thisBlock.size(); z++){ blockArray[z] = ((Integer)thisBlock.elementAt(z)).intValue(); } // System.out.println(first + " " + last + " " + numStrong + " " + numRec); if ((double)numStrong/(double)(numStrong + numRec) > informFrac){ //this qualifies as a block //add to the block list, but in order by first marker number: if (blocks.size() == 0){ //put first block first blocks.add(blockArray); }else{ //sort by ascending separation of markers in each pair boolean placed = false; for (int b = 0; b < blocks.size(); b ++){ if (first < ((int[])blocks.elementAt(b))[0]){ blocks.insertElementAt(blockArray, b); placed = true; break; } } //make sure to put in blocks which fall on the tail end if (!placed) blocks.add(blockArray); } for (int used = first; used <= last; used++){ usedInBlock[used] = true; } } } return blocks; }
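// Hedged sketch (not from the original source): the two edits above wrap the position difference
// in Math.abs() so "sep" is always a non-negative distance. If marker x ever has a larger
// position than marker y, the raw subtraction is negative, which would both defeat the
// "sep > maxDist[...]" length test and scramble the descending-separation insertion sort.
class SeparationDemo {
    public static void main(String[] args) {
        long posX = 1250000L;
        long posY = 1100000L;                 // y happens to lie before x on the chromosome

        long rawSep = posY - posX;            // -150000: sorts as "shorter" than every real pair
        long absSep = Math.abs(posY - posX);  //  150000: the intended physical distance

        System.out.println("raw separation = " + rawSep);
        System.out.println("abs separation = " + absSep);
    }
}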
Task task = (Task) type.newInstance(); task.setProject(project); task.setTaskName(name); TaskTag tag = new TaskTag( task );
TaskTag tag = new TaskTag( project, type, name );
public TagScript createTagScript(String name, Attributes attributes) throws Exception { Project project = getProject(); // custom Ant tags if ( name.equals("fileScanner") ) { Tag tag = new FileScannerTag(new FileScanner(project)); return TagScript.newInstance(tag); } // is it an Ant task? Class type = (Class) project.getTaskDefinitions().get(name); if ( type != null ) { Task task = (Task) type.newInstance(); task.setProject(project); task.setTaskName(name); TaskTag tag = new TaskTag( task ); tag.setTrim( true ); return TagScript.newInstance(tag); } // an Ant DataType? Object dataType = null; type = (Class) project.getDataTypeDefinitions().get(name); if ( type != null ) { dataType = type.newInstance(); } else { dataType = project.createDataType(name); } if ( dataType != null ) { DataTypeTag tag = new DataTypeTag( name, dataType ); tag.getDynaBean().set( "project", project ); return TagScript.newInstance(tag); } // assume its an Ant property object (classpath, arg etc). Tag tag = new TaskPropertyTag( name ); return TagScript.newInstance(tag); }
Stylesheet stylesheet = getStylesheet(); if ( stylesheet == null ) {
StylesheetTag tag = (StylesheetTag) findAncestorWithClass( StylesheetTag.class ); if (tag == null) {
public void doTag(XMLOutput output) throws Exception { Stylesheet stylesheet = getStylesheet(); if ( stylesheet == null ) { throw new JellyException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Object context = getXPathContext(); if ( select != null ) { stylesheet.applyTemplates( context, select ); } else { stylesheet.applyTemplates( context ); } // #### should support MODE!!! }
Object context = getXPathContext();
Stylesheet stylesheet = tag.getStylesheet(); Object source = tag.getXPathSource();
public void doTag(XMLOutput output) throws Exception { Stylesheet stylesheet = getStylesheet(); if ( stylesheet == null ) { throw new JellyException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Object context = getXPathContext(); if ( select != null ) { stylesheet.applyTemplates( context, select ); } else { stylesheet.applyTemplates( context ); } // #### should support MODE!!! }
stylesheet.applyTemplates( context, select );
stylesheet.applyTemplates( source, select );
public void doTag(XMLOutput output) throws Exception { Stylesheet stylesheet = getStylesheet(); if ( stylesheet == null ) { throw new JellyException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Object context = getXPathContext(); if ( select != null ) { stylesheet.applyTemplates( context, select ); } else { stylesheet.applyTemplates( context ); } // #### should support MODE!!! }
stylesheet.applyTemplates( context );
stylesheet.applyTemplates( source );
public void doTag(XMLOutput output) throws Exception { Stylesheet stylesheet = getStylesheet(); if ( stylesheet == null ) { throw new JellyException( "<applyTemplates> tag must be inside a <stylesheet> tag" ); } Object context = getXPathContext(); if ( select != null ) { stylesheet.applyTemplates( context, select ); } else { stylesheet.applyTemplates( context ); } // #### should support MODE!!! }
if(plinkFileName != null){
if(mapFileName != null){
private void argHandler(String[] args){ argHandlerMessages = new Vector(); int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; tagging = Tagger.NONE; maxNumTags = Tagger.DEFAULT_MAXNUMTAGS; findTags = true; double cutHighCI = -1; double cutLowCI = -1; double mafThresh = -1; double recHighCI = -1; double informFrac = -1; double fourGameteCutoff = -1; double spineDP = -1; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-log")){ i++; if (i >= args.length || args[i].charAt(0) == '-'){ logName = "haploview.log"; i--; }else{ logName = args[i]; } } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(pedFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ die("PC LOADLETTER?! What the fuck does that mean?!"); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ die("-excludeMarkers requires a list of markers"); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { StringBuffer sb = new StringBuffer(); if (!quietMode) sb.append("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) sb.append(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) sb.append(token+" "); excludedMarkers.add(new Integer(token)); } } argHandlerMessages.add(sb.toString()); } catch(NumberFormatException nfe) { die("-excludeMarkers argument should be of the format: 1,3,5..8,12"); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapsFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(infoFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapmapFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpdata")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpdataFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap data file listed will be used"); } phasedhmpdataFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpsample")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpsampleFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap sample file listed will be used"); } phasedhmpsampleFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmplegend")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmplegendFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap legend file listed will be used"); } phasedhmplegendFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhapmapdl")){ phasedhapmapDownload = true; } else if (args[i].equalsIgnoreCase("-plink")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last PLINK file listed will be used"); } plinkFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-map")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last map file listed will be used"); } mapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ die("-track requires a filename"); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ die("Only one block output type argument is allowed."); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if (args[i].equalsIgnoreCase("-indcheck")){ individualCheck = true; } else if (args[i].equalsIgnoreCase("-mendel")){ mendel = true; } else if (args[i].equalsIgnoreCase("-malehets")){ malehets = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; maxDistance = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(batchFileName != null){ argHandlerMessages.add("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; maxMendel = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-randomcc")){ assocCC = true; randomizeAffection = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } else if(args[i].equalsIgnoreCase("GOLD")) { Options.setLDColorScheme(GOLD_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-blockCutHighCI")) { i++; cutHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockCutLowCI")) { i++; cutLowCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockMafThresh")) { i++; mafThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockRecHighCI")) { i++; recHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockInformFrac")) { i++; informFrac = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-block4GamCut")) { i++; fourGameteCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockSpineDP")) { i++; spineDP = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-permtests")) { i++; doPermutationTest = true; permutationCount = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-aggressiveTagging")) { tagging = Tagger.AGGRESSIVE_TRIPLE; } else if (args[i].equalsIgnoreCase("-pairwiseTagging")){ tagging = Tagger.PAIRWISE_ONLY; } else if (args[i].equalsIgnoreCase("-printalltags")){ Options.setPrintAllTags(true); } else if(args[i].equalsIgnoreCase("-maxNumTags")){ i++; maxNumTags = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,0,1); } else if (args[i].equalsIgnoreCase("-dontaddtags")){ findTags = false; } else if(args[i].equalsIgnoreCase("-tagLODCutoff")) { i++; Options.setTaggerLODCutoff(getDoubleArg(args,i,0,100000)); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die(args[i-1] + " requires a 
list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die("-excludeTags requires a list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-captureAlleles")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { captureAllelesFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-designScores")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { designScoresFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-mintagdistance")){ i++; minTagDistance = args[i]; } else if(args[i].equalsIgnoreCase("-chromosome") || args[i].equalsIgnoreCase("-chr")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { chromosomeArg =args[i]; }else { die(args[i-1] + " requires a chromosome name"); } if(!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ try{ if (Integer.parseInt(chromosomeArg) > 22){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } }catch(NumberFormatException nfe){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } } } else if(args[i].equalsIgnoreCase("-population")){ i++; if(!(i>=args.length) && !(args[i].charAt(0)== '-')) { populationArg = args[i]; }else { die(args[i-1] + "requires a population name"); } } else if(args[i].equalsIgnoreCase("-startpos")){ i++; startPos = args[i]; } else if(args[i].equalsIgnoreCase("-endPos")){ i++; endPos = args[i]; } else if(args[i].equalsIgnoreCase("-release")){ i++; release = args[i]; } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else if(args[i].equalsIgnoreCase("-gzip")){ Options.setGzip(true); } else { die("invalid parameter specified: " + args[i]); } } if (logName != null){ logString = "*****************************************************\n" + TITLE_STRING + "\tJava Version: " + JAVA_VERSION + "\n*****************************************************\n\n\n" + "Arguments:\t"; for (int i = 0; i < args.length; i++){ logString = logString + args[i] + "\t"; } logString = logString + "\n\n"; } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(phasedhmpdataFileName != null) { countOptions++; if(phasedhmpsampleFileName == null){ die("You must specify a sample file for phased hapmap input."); }else if(phasedhmplegendFileName == null){ die("You must specify a legend file for phased hapmap input."); } } if(phasedhapmapDownload) { countOptions++; } if(plinkFileName != null){ countOptions++; if(mapFileName == null){ die("You must specify a map file for plink format input."); } } if(batchFileName != null) { 
countOptions++; } if(countOptions > 1) { die("Only one genotype input file may be specified on the command line."); } else if(countOptions == 0 && nogui) { die("You must specify a genotype input file."); } //mess with vars, set defaults, etc if(skipCheck) { argHandlerMessages.add("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = MAXDIST_DEFAULT; }else{ argHandlerMessages.add("Max LD comparison distance = " +maxDistance + "kb"); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold(hapThresh); argHandlerMessages.add("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; argHandlerMessages.add("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); argHandlerMessages.add("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; argHandlerMessages.add("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; argHandlerMessages.add("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); argHandlerMessages.add("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); argHandlerMessages.add("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(cutHighCI != -1) { FindBlocks.cutHighCI = cutHighCI; } if(cutLowCI != -1) { FindBlocks.cutLowCI = cutLowCI; } if(mafThresh != -1) { FindBlocks.mafThresh = mafThresh; } if(recHighCI != -1) { FindBlocks.recHighCI = recHighCI; } if(informFrac != -1) { FindBlocks.informFrac = informFrac; } if(fourGameteCutoff != -1) { FindBlocks.fourGameteCutoff = fourGameteCutoff; } if(spineDP != -1) { FindBlocks.spineDP = spineDP; } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); }else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if (Options.getAssocTest() != ASSOC_NONE && infoFileName == null && hapmapFileName == null) { die("A marker info file must be specified when performing association tests."); } if(doPermutationTest) { if(!assocCC && !assocTDT) { die("An association test type must be specified for permutation tests to be performed."); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { die("An association test type must be specified when using a custom association test file."); } if(infoFileName == null) { die("A marker info file must be specified when using a custom association test file."); } } if(tagging != Tagger.NONE) { if(infoFileName == null && hapmapFileName == null && batchFileName == null && phasedhmpdataFileName == null && !phasedhapmapDownload) { die("A marker info file must be specified when tagging."); } if(forceExcludeTags == null) { forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { die("-excludeTags and -excludeTagsFile cannot both be used"); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -excludeTagsFile."); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != 
null) { die("-includeTags and -includeTagsFile cannot both be used"); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -includeTagsFile."); } } if (captureAllelesFileName != null) { File captureFile = new File(captureAllelesFileName); captureAlleleTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(captureFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ line = line.trim(); captureAlleleTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (designScoresFileName != null) { File designFile = new File(designScoresFileName); designScores = new Hashtable(1,1); try { BufferedReader br = new BufferedReader(new FileReader(designFile)); String line; int lines = 0; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ StringTokenizer st = new StringTokenizer(line); int length = st.countTokens(); if (length != 2){ die("Invalid formatting on line " + lines); } String marker = st.nextToken(); Double score = new Double(st.nextToken()); designScores.put(marker,score); } lines++; } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (minTagDistance != null) { try{ if (Integer.parseInt(minTagDistance) < 0){ die("minimum tag distance cannot be negative"); } }catch(NumberFormatException nfe){ die("minimum tag distance must be a positive integer"); } Options.setTaggerMinDistance(Integer.parseInt(minTagDistance)); } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } die("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { die("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without a tagging option"); } if(chromosomeArg != null && hapmapFileName != null) { argHandlerMessages.add("-chromosome flag ignored when loading hapmap file"); chromosomeArg = null; } if(chromosomeArg != null) { Chromosome.setDataChrom("chr" + chromosomeArg); }else{ chromosomeArg = ""; } if (phasedhapmapDownload){ if (chromosomeArg == null){ die("-phasedhapmapdl requires a chromosome specification"); }else if (!(populationArg.equalsIgnoreCase("CEU") || populationArg.equalsIgnoreCase("YRI") || populationArg.equalsIgnoreCase("CHB+JPT"))){ die("-phasedhapmapdl requires a population specification of CEU, YRI, or CHB+JPT"); } if (Integer.parseInt(chromosomeArg) < 1 && Integer.parseInt(chromosomeArg) > 22){ if (!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ die("-chromosome must be betweeen 1 and 22, X, or Y"); } } try{ if (Integer.parseInt(startPos) > 
Integer.parseInt(endPos)){ die("-endpos must be greater than -startpos"); } }catch(NumberFormatException nfe){ die("-startpos and -endpos must be integer values"); } if (release == null){ release = "21"; } if (!(release.equals("21")) && !(release.startsWith("16"))){ die("release must be either 16a or 21"); } } }

fc.setSelectedFile(null);
void saveDprimeToText(){ try{ fc.setSelectedFile(null); int returnVal = fc.showSaveDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { new TextMethods().saveDprimeToText(theData.dPrimeTable, fc.getSelectedFile(), infoKnown, theData.markerInfo); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
fc.setSelectedFile(null);
void saveHapsToText(){ try{ fc.setSelectedFile(null); int returnVal = fc.showSaveDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { new TextMethods().saveHapsToText(finishedHaplos, fc.getSelectedFile()); } }catch (IOException ioexec){ JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
super(config); time = System.currentTimeMillis();
this(config, System.currentTimeMillis());
public ApplicationEvent(ApplicationConfig config){ super(config); time = System.currentTimeMillis(); }
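// Hedged sketch (not from the original source): the edit above replaces duplicated initialization
// with constructor chaining via this(config, System.currentTimeMillis()), so the (config, time)
// constructor stays the single place that assigns the fields. DemoConfig and DemoEvent are
// hypothetical stand-ins for the real classes.
class DemoConfig { }

class DemoEvent {
    private final DemoConfig config;
    private final long time;

    DemoEvent(DemoConfig config) {
        this(config, System.currentTimeMillis());   // delegate instead of repeating assignments
    }

    DemoEvent(DemoConfig config, long time) {
        this.config = config;
        this.time = time;
    }

    long getTime() { return time; }
}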
public ViewerTag(Class widgetClass) { super(widgetClass);
public ViewerTag(Class tagClass) { super(tagClass);
public ViewerTag(Class widgetClass) { super(widgetClass); }
public List keysOfColumn(SQLColumn col) throws ArchitectException { LinkedList keys = new LinkedList();
public List<SQLRelationship> keysOfColumn(SQLColumn col) throws ArchitectException { LinkedList<SQLRelationship> keys = new LinkedList<SQLRelationship>();
public List keysOfColumn(SQLColumn col) throws ArchitectException { LinkedList keys = new LinkedList(); Iterator it = getExportedKeys().iterator(); while (it.hasNext()) { SQLRelationship r = (SQLRelationship) it.next(); if (r.containsPkColumn(col)) { keys.add(r); } } it = getExportedKeys().iterator(); while (it.hasNext()) { SQLRelationship r = (SQLRelationship) it.next(); if (r.containsFkColumn(col)) { keys.add(r); } } return keys; }
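// Hedged sketch (not from the original source): parameterizing the return type, as in the edit
// above, moves the cast from every caller into the compiler's hands. Relationship and the two
// helper methods below are hypothetical stand-ins, not the real Architect classes.
import java.util.LinkedList;
import java.util.List;

class Relationship { String name = "fk_demo"; }

class GenericsDemo {
    // Raw style: every caller must cast, and the compiler cannot check element types.
    static List keysRaw() {
        LinkedList keys = new LinkedList();
        keys.add(new Relationship());
        return keys;
    }

    // Parameterized style: callers iterate and read elements without casts.
    static List<Relationship> keysTyped() {
        LinkedList<Relationship> keys = new LinkedList<Relationship>();
        keys.add(new Relationship());
        return keys;
    }

    public static void main(String[] args) {
        Relationship first = (Relationship) keysRaw().get(0);   // unchecked cast required
        Relationship second = keysTyped().get(0);               // no cast, checked at compile time
        System.out.println(first.name + " / " + second.name);
    }
}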
System.out.println("Error: One or more males in the file is heterozygous.\nThese genotypes have been ignored.");
System.out.println("Error: At least one male in the file is heterozygous.\nThese genotypes have been ignored.");
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File outputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } if(textData.getPedFile().isHaploidHets()){ System.out.println("Error: One or more males in the file is heterozygous.\nThese genotypes have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getDisplayName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.getPedFile().setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.getPedFile().isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ textData.getPedFile().saveCheckDataToText(validateOutputFile(fileName + ".CHECK")); } if(individualCheck && result != null){ IndividualDialog id = new IndividualDialog(textData); id.printTable(validateOutputFile(fileName + ".INDCHECK")); } if(mendel && result != null){ MendelDialog md = new MendelDialog(textData); md.printTable(validateOutputFile(fileName + ".MENDEL" )); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; 
Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: outputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: outputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: outputFile = validateOutputFile(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: outputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here outputFile = null; break; default: outputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { outputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } outputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } outputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { outputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(outputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(outputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ outputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, outputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,null,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getDisplayName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getDisplayName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); //todo: I don't like this at the moment, removed subject to further consideration. //tc.dumpTags(validateOutputFile(fileName + ".TAGSNPS")); } } catch(IOException e){ System.err.println("An error has occured:"); System.err.println(e.getMessage()); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } catch(TaggerException te){ System.err.println(te.getMessage()); } }
setFrequencyCutoff(((double)freqCutoff)/100);
filterByFrequency(((double)freqCutoff)/100);
public HaplotypeAssociationResult(Haplotype[] locusHaplos, int freqCutoff, String n) { nf.setGroupingUsed(false); for (int i = 0; i < locusHaplos.length; i++){ alleles.add(locusHaplos[i]); } setFrequencyCutoff(((double)freqCutoff)/100); name = n; haps = locusHaplos; }
if (i!=j){
if (h != alleles.get(j)){
public String getCountString(int i){ nf.setMinimumFractionDigits(1); nf.setMaximumFractionDigits(1); Haplotype h = (Haplotype) filteredAlleles.get(i); StringBuffer countSB = new StringBuffer(); if(Options.getAssocTest() == ASSOC_TRIO) { countSB.append(nf.format(h.getTransCount())).append(" : ").append(nf.format(h.getUntransCount())); } else if(Options.getAssocTest() == ASSOC_CC) { double caseSum = 0, controlSum = 0; for (int j = 0; j < alleles.size(); j++){ if (i!=j){ caseSum += ((Haplotype)alleles.get(j)).getCaseCount(); controlSum += ((Haplotype)alleles.get(j)).getControlCount(); } } countSB.append(nf.format(h.getCaseCount())).append(" : ").append(nf.format(caseSum)).append(", "); countSB.append(nf.format(h.getControlCount())).append(" : ").append(nf.format(controlSum)); } return countSB.toString(); }
public Context getContext() {
public JellyContext getContext() {
public Context getContext() { return context; }
else if(countOptions == 0) {
else if(countOptions == 0 && nogui) {
private void argHandler(String[] args){ //TODO: -specify values from HaplotypeDisplayController (min hap percentage etc) //TODO: -want to be able to output haps file from pedfile /* boolean nogui = false; String batchMode = ""; String hapsFileName = ""; String pedFileName = ""; String infoFileName = ""; String hapmapFileName = ""; String blockFileName = ""; boolean showCheck = false; boolean skipCheck = false; Vector ignoreMarkers = new Vector(); int outputType = -1; int maxDistance = -1; boolean quietMode = false; boolean outputDprime=false; boolean outputPNG = false; boolean outputSmallPNG = false; boolean outputCheck=false;*/ int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below outputType = -1; for(int i =0; i < args.length; i++) { if(args[i].equals("-help") || args[i].equals("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equals("-n") || args[i].equals("-nogui")) { nogui = true; } else if(args[i].equals("-p") || args[i].equals("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(pedFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equals("-skipcheck") || args[i].equals("--skipcheck")){ skipCheck = true; } //todo: fix ignoremarkers /* else if (args[i].equals("--ignoremarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ System.out.println("--ignoremarkers requires a list of markers"); System.exit(1); } else { StringTokenizer str = new StringTokenizer(args[i],","); while(str.hasMoreTokens()) { ignoreMarkers.add(str.nextToken()); } } } */ else if(args[i].equals("-ha") || args[i].equals("-l") || args[i].equals("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapsFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equals("-i") || args[i].equals("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(infoFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equals("-a") || args[i].equals("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(hapmapFileName != null){ System.out.println("multiple "+args[i-1] + " arguments found. 
only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if(args[i].equals("-k") || args[i].equals("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; outputType = BLOX_CUSTOM; }else{ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equals("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ System.out.println("-track requires a filename"); System.exit(1); } } else if(args[i].equals("-o") || args[i].equals("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(outputType != -1){ System.out.println("only one output argument is allowed"); System.exit(1); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ outputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ outputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ outputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { outputType = BLOX_ALL; } } else { //defaults to SFS output outputType = BLOX_GABRIEL; i--; } } else if(args[i].equals("-d") || args[i].equals("--dprime") || args[i].equals("-dprime")) { outputDprime = true; } else if (args[i].equals("-c") || args[i].equals("-check")){ outputCheck = true; } else if(args[i].equals("-m") || args[i].equals("-maxdistance")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires an integer argument"); System.exit(1); } else { if(maxDistance != -1){ System.out.println("only one "+args[i-1] + " argument allowed"); System.exit(1); } maxDistance = Integer.parseInt(args[i]); if(maxDistance<0){ System.out.println(args[i-1] + " argument must be a positive integer"); System.exit(1); } } } else if(args[i].equals("-b") || args[i].equals("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ System.out.println(args[i-1] + " requires a filename"); System.exit(1); } else{ if(batchFileName != null){ System.out.println("multiple " + args[i-1] + " arguments found. only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equals("-q") || args[i].equals("-quiet")) { quietMode = true; } else { System.out.println("invalid parameter specified: " + args[i]); } } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(batchFileName != null) { countOptions++; } if(countOptions > 1) { System.out.println("Only one genotype input file may be specified on the command line."); System.exit(1); } else if(countOptions == 0) { System.out.println("You must specify a genotype input file."); System.exit(1); } //mess with vars, set defaults, etc if( outputType == -1 && ( pedFileName != null || hapsFileName != null || batchFileName != null || hapmapFileName != null) && !outputDprime && !outputCheck && !outputPNG && !outputCompressedPNG) { outputType = BLOX_GABRIEL; if(nogui && !quietMode) { System.out.println("No output type specified. 
Default of Gabriel will be used"); } } if(skipCheck && !quietMode) { System.out.println("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = 500; } Options.setMaxDistance(maxDistance); }
logger.debug("displayExceptionDialog: showing exception dialog for:", throwable);
private static void displayExceptionDialog(Component parent, String message, Throwable throwable) { if (parent == null) { logger.error("displayExceptionDialog with null parent for message " + message); } StringWriter traceWriter = new StringWriter(); throwable.printStackTrace(new PrintWriter(traceWriter)); JPanel messageComponent = new JPanel(new BorderLayout()); messageComponent.add(new JLabel(message), BorderLayout.NORTH); messageComponent.add(new JScrollPane(new JTextArea(traceWriter.toString())), BorderLayout.CENTER); messageComponent.setPreferredSize(new Dimension(600, 400)); JOptionPane.showMessageDialog(parent, messageComponent, "Error Report", JOptionPane.ERROR_MESSAGE); }
protected String getBodyText( Context context ) throws Exception { StringWriter writer = new StringWriter(); body.run( context, XMLOutput.createXMLOutput( writer ) ); return writer.toString(); }
protected String getBodyText() throws Exception { StringWriter writer = new StringWriter(); body.run(context, XMLOutput.createXMLOutput(writer)); return writer.toString(); }
protected String getBodyText( Context context ) throws Exception { // XXX: could maybe optimise this later on by having a pool of buffers StringWriter writer = new StringWriter(); body.run( context, XMLOutput.createXMLOutput( writer ) ); return writer.toString(); }
public void run(Context context, XMLOutput output) throws Exception { getBody().run(context, output); }
public void run(JellyContext context, XMLOutput output) throws Exception { getBody().run(context, output); }
public void run(Context context, XMLOutput output) throws Exception { getBody().run(context, output); }
getThreadScriptDataMap().clear(); variables.clear();
clearScriptData(); clearVariables();
public void clear() { getThreadScriptDataMap().clear(); variables.clear(); }
threadLocalScriptData = new ThreadLocal();
threadLocalScriptData.clear();
public void clearScriptData() { threadLocalScriptData = new ThreadLocal(); }
Map data = (Map) threadLocalScriptData.get();
Thread t = Thread.currentThread(); Map data = (Map) threadLocalScriptData.get(t);
public Map getThreadScriptDataMap() { Map rv; Map data = (Map) threadLocalScriptData.get(); if (data == null) { rv = new HashMap(); threadLocalScriptData.set(rv); } else { rv = data; } return rv; }
threadLocalScriptData.set(rv);
threadLocalScriptData.put(t, rv);
public Map getThreadScriptDataMap() { Map rv; Map data = (Map) threadLocalScriptData.get(); if (data == null) { rv = new HashMap(); threadLocalScriptData.set(rv); } else { rv = data; } return rv; }
log.debug( "FUZZY_DATE - updateValue ({}) " );
protected void initModelFields() { modelFields.put( PHOTOGRAPHER, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setPhotographer( (String) value ); } protected Object getModelValue( Object model ) { if ( model == null ) { return null; } PhotoInfo obj = (PhotoInfo) model; return obj.getPhotographer(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setPhotographer( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getPhotographer(); } }); modelFields.put( FUZZY_DATE, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; FuzzyDate fd = (FuzzyDate) value; if ( fd != null ) { obj.setShootTime( fd.getDate() ); obj.setTimeAccuracy( fd.getAccuracy() ); } else { obj.setShootTime( null ); obj.setTimeAccuracy( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; Date date = obj.getShootTime(); double accuracy = obj.getTimeAccuracy(); return new FuzzyDate( date, accuracy ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFuzzyDate( (FuzzyDate) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getFuzzyDate(); } }); modelFields.put( QUALITY, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; if ( value != null ) { obj.setQuality( ((Number)value).intValue() ); } else { obj.setQuality( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return new Double( obj.getQuality() ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setQuality( (Number)value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getQuality(); } }); modelFields.put( SHOOTING_PLACE, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setShootingPlace( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getShootingPlace(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setShootPlace( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getShootPlace(); } }); modelFields.put( CAMERA_MODEL, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setCamera( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getCamera(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setCamera( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getCamera(); } }); modelFields.put( FILM_TYPE, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setFilm( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getFilm(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFilm( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = 
(PhotoInfoView) view; value = obj.getFilm(); } }); modelFields.put( LENS_TYPE, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setLens( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getLens(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setLens( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getLens(); } }); modelFields.put( DESCRIPTION, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setDescription( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getDescription(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setDescription( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getDescription(); } }); modelFields.put( TECHNOTE, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; obj.setTechNotes( (String) value ); } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return obj.getTechNotes(); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setTechNote( (String) value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getTechNote(); } }); modelFields.put( F_STOP, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; if ( value != null ) { obj.setFStop( ((Number)value).doubleValue() ); } else { obj.setFStop( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return new Double( obj.getFStop() ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFStop( (Number)value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getFStop(); } }); modelFields.put( SHUTTER_SPEED, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; if ( value != null ) { obj.setShutterSpeed( ((Number)value).doubleValue() ); } else { obj.setShutterSpeed( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return new Double( obj.getShutterSpeed() ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setShutterSpeed( (Number)value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getShutterSpeed(); } }); modelFields.put( FOCAL_LENGTH, new FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; if ( value != null ) { obj.setFocalLength( ((Number)value).doubleValue() ); } else { obj.setFocalLength( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return new Double( obj.getFocalLength() ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFocalLength( (Number)value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getFocalLength(); } }); modelFields.put( FILM_SPEED, new 
FieldController( photos ) { protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; if ( value != null ) { obj.setFilmSpeed( ((Number)value).intValue() ); } else { obj.setFilmSpeed( 0 ); } } protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; return new Double( obj.getFilmSpeed() ); } protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFilmSpeed( (Number)value ); } protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getFilmSpeed(); } }); // TODO: Add other fields // Init the views in the fields Iterator iter = modelFields.values().iterator(); while( iter.hasNext() ) { FieldController fieldCtrl = (FieldController) iter.next(); fieldCtrl.setViews( views ); } }
log.debug( "FUZZY_DATE - getModeValue ({}) " );
protected Object getModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; Date date = obj.getShootTime(); double accuracy = obj.getTimeAccuracy(); return new FuzzyDate( date, accuracy ); }
log.debug( "FUZZY_DATE - setModeValue ({}) " );
protected void setModelValue( Object model ) { PhotoInfo obj = (PhotoInfo) model; FuzzyDate fd = (FuzzyDate) value; if ( fd != null ) { obj.setShootTime( fd.getDate() ); obj.setTimeAccuracy( fd.getAccuracy() ); } else { obj.setShootTime( null ); obj.setTimeAccuracy( 0 ); } }
log.debug( "FUZZY_DATE - updateValue ({}) " );
protected void updateValue( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; value = obj.getFuzzyDate(); }
log.debug( "FUZZY_DATE - updateView ({}) " );
protected void updateView( Object view ) { PhotoInfoView obj = (PhotoInfoView) view; obj.setFuzzyDate( (FuzzyDate) value ); }
if (this.nullable != argNullable) { this.nullable = argNullable; fireDbObjectChanged("nullable",oldNullable,argNullable); }
this.nullable = argNullable; fireDbObjectChanged("nullable",oldNullable,argNullable);
public void setNullable(int argNullable) { int oldNullable = this.nullable; logger.debug("Changing nullable "+oldNullable+" -> "+argNullable); if (this.nullable != argNullable) { this.nullable = argNullable; fireDbObjectChanged("nullable",oldNullable,argNullable); } }
try {
Integer oldPrimaryKeySeq = primaryKeySeq; if (!isMagicEnabled()) { this.primaryKeySeq = argPrimaryKeySeq; fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); } else try {
public void setPrimaryKeySeq(Integer argPrimaryKeySeq) { // do nothing if there's no change if ( (primaryKeySeq == null && argPrimaryKeySeq == null) || (primaryKeySeq != null && primaryKeySeq.equals(argPrimaryKeySeq)) ) { return; } try { startCompoundEdit("Starting PrimaryKeySeq compound edit"); Integer oldPrimaryKeySeq = primaryKeySeq; if (argPrimaryKeySeq != null && !this.autoIncrement) { setNullable(DatabaseMetaData.columnNoNulls); } this.primaryKeySeq = argPrimaryKeySeq; fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); SQLObject p = parent; if (p != null) { p.removeChild(this); int idx = 0; int targetPKS = primaryKeySeq == null ? Integer.MAX_VALUE : primaryKeySeq.intValue(); logger.debug("Parent = "+p); logger.debug("Parent.children = "+p.children); for (SQLColumn col : (List<SQLColumn>) p.children) { if (col.getPrimaryKeySeq() == null || col.getPrimaryKeySeq() > targetPKS) { logger.debug("idx is " + idx); break; } idx++; } p.addChild(idx, this); getParentTable().normalizePrimaryKey(); } } catch (ArchitectException e) { throw new ArchitectRuntimeException(e); } finally { endCompoundEdit("Ending PrimaryKeySeq compound edit"); } }
Integer oldPrimaryKeySeq = primaryKeySeq;
public void setPrimaryKeySeq(Integer argPrimaryKeySeq) { // do nothing if there's no change if ( (primaryKeySeq == null && argPrimaryKeySeq == null) || (primaryKeySeq != null && primaryKeySeq.equals(argPrimaryKeySeq)) ) { return; } try { startCompoundEdit("Starting PrimaryKeySeq compound edit"); Integer oldPrimaryKeySeq = primaryKeySeq; if (argPrimaryKeySeq != null && !this.autoIncrement) { setNullable(DatabaseMetaData.columnNoNulls); } this.primaryKeySeq = argPrimaryKeySeq; fireDbObjectChanged("primaryKeySeq",oldPrimaryKeySeq,argPrimaryKeySeq); SQLObject p = parent; if (p != null) { p.removeChild(this); int idx = 0; int targetPKS = primaryKeySeq == null ? Integer.MAX_VALUE : primaryKeySeq.intValue(); logger.debug("Parent = "+p); logger.debug("Parent.children = "+p.children); for (SQLColumn col : (List<SQLColumn>) p.children) { if (col.getPrimaryKeySeq() == null || col.getPrimaryKeySeq() > targetPKS) { logger.debug("idx is " + idx); break; } idx++; } p.addChild(idx, this); getParentTable().normalizePrimaryKey(); } } catch (ArchitectException e) { throw new ArchitectRuntimeException(e); } finally { endCompoundEdit("Ending PrimaryKeySeq compound edit"); } }
if (type != argType) { setSourceDataTypeName(null); this.type = argType; fireDbObjectChanged("type",oldType,argType); }
setSourceDataTypeName(null); this.type = argType; fireDbObjectChanged("type",oldType,argType);
public void setType(int argType) { int oldType = type; if (type != argType) { setSourceDataTypeName(null); this.type = argType; fireDbObjectChanged("type",oldType,argType); } }
System.out.println("Col index "+selectedColIndex);
logger.debug("Col index "+selectedColIndex);
public void mouseClicked(MouseEvent evt) { Point p = evt.getPoint(); unzoomPoint(p); PlayPenComponent c = contentPane.getComponentAt(p); if (c != null) p.translate(-c.getX(), -c.getY()); if ( c instanceof Relationship) { if (evt.getClickCount() == 2) { ArchitectFrame.getMainInstance().editRelationshipAction.actionPerformed (new ActionEvent(evt.getSource(), ActionEvent.ACTION_PERFORMED, ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)); } else if(evt.getClickCount()==1){ if (c.isSelected()&& componentPreviouslySelected)c.setSelected(false); } } else if ( c instanceof TablePane ) { TablePane tp = (TablePane) c; if ((evt.getModifiers() & MouseEvent.BUTTON1_MASK) != 0) { try { int selectedColIndex = tp.pointToColumnIndex(p); if (evt.getClickCount() == 2) { // double click if (tp.isSelected()) { ArchitectFrame af = ArchitectFrame.getMainInstance(); if (selectedColIndex == TablePane.COLUMN_INDEX_TITLE) { af.editTableAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)); } else if (selectedColIndex >= 0) { af.editColumnAction.actionPerformed (new ActionEvent(tp, ActionEvent.ACTION_PERFORMED, ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)); } } } else if(evt.getClickCount()==1) { System.out.println("Col index "+selectedColIndex); if (selectedColIndex > TablePane.COLUMN_INDEX_TITLE && componentPreviouslySelected){ ((TablePane)c).deselectColumn(selectedColIndex); } else if (c.isSelected()&& componentPreviouslySelected) { c.setSelected(false); } } } catch (ArchitectException e) { logger.error("Exception converting point to column", e); } } } else { maybeShowPopup(evt); } }
ddlg.makeDropForeignKeySQL(c.getCatalog(), c.getSchema(), rs.getString("FKTABLE_NAME"), c.getName()));
ddlg.makeDropForeignKeySQL(rs.getString("FKTABLE_NAME"), c.getName()));
private void addDependantsFromKeys(ResultSet rs) throws SQLException { Conflict prev = null; while (rs.next()) { Conflict c = new Conflict("FOREIGN KEY", rs.getString("FKTABLE_CAT"), rs.getString("FKTABLE_SCHEM"), rs.getString("FK_NAME")); // multi-column keys get multiple rows in this result set. We need to skip 'em. if (c.equals(prev)) continue; prev = c; c.setSqlDropStatement( ddlg.makeDropForeignKeySQL(c.getCatalog(), c.getSchema(), rs.getString("FKTABLE_NAME"), c.getName())); dependants.add(c); } }
c.setSqlDropStatement(ddlg.makeDropTableSQL(c.getCatalog(), c.getSchema(), c.getName()));
ddlg.setTargetCatalog(c.getCatalog()); ddlg.setTargetSchema(c.getSchema()); c.setSqlDropStatement(ddlg.makeDropTableSQL(c.getName()));
public void findConflicting() throws SQLException, ArchitectException { doingFindConflicting = true; Connection con = null; try { conflicts = new ArrayList(); monitorableProgress = 0; if (logger.isDebugEnabled()) { logger.debug("About to find conflicting objects for DDL Script: "+ddlStatements); } con = targetDatabase.getConnection(); DatabaseMetaData dbmd = con.getMetaData(); Iterator it = ddlStatements.iterator(); while (it.hasNext()) { DDLStatement ddlStmt = (DDLStatement) it.next(); monitorableProgress += 1; if (ddlStmt.getType() != DDLStatement.StatementType.CREATE) continue; SQLObject so = ddlStmt.getObject(); Class clazz = so.getClass(); if (clazz.equals(SQLTable.class)) { SQLTable t = (SQLTable) so; String cat = ddlStmt.getTargetCatalog(); String sch = ddlStmt.getTargetSchema(); if (logger.isDebugEnabled()) { logger.debug("Finding conflicts for TABLE '" + cat + "'.'" + sch + "'.'" + t.getPhysicalName() + "'"); } ResultSet rs = dbmd.getTables( ddlg.toIdentifier(cat), ddlg.toIdentifier(sch), ddlg.toIdentifier(t.getPhysicalName()), null); while (rs.next()) { Conflict c = new Conflict( rs.getString("TABLE_TYPE"), rs.getString("TABLE_CAT"), rs.getString("TABLE_SCHEM"), rs.getString("TABLE_NAME")); c.setSqlDropStatement(ddlg.makeDropTableSQL(c.getCatalog(), c.getSchema(), c.getName())); c.addTableDependants(dbmd); conflicts.add(c); } rs.close(); } else if (clazz.equals(SQLRelationship.class)) { logger.error("Relationship conflicts are not supported yet!"); } else { throw new IllegalArgumentException( "Unknown subclass of SQLObject: " + clazz.getName()); } } if (logger.isDebugEnabled()) { logger.debug("Found conflicts: " + conflicts); } } finally { findConflictingFinished = true; doingFindConflicting = false; try { if (con != null) con.close(); } catch (SQLException ex) { logger.error("Couldn't close connection"); } } }
return (Tag) tagHolderMap.get(script);
Tag tag = (Tag) tagHolderMap.get(script); if( tag == null && getParent() != null) { return getParent().getTagOfTagScript(script); } else { return tag; }
public Tag getTagOfTagScript(TagScript script) { if( script == null ) return null; return (Tag) tagHolderMap.get(script); }
public void run(Context context, XMLOutput output) throws Exception {
public void run(JellyContext context, XMLOutput output) throws Exception {
public void run(Context context, XMLOutput output) throws Exception { tagLibrary = new DynamicTagLibrary( getUri() ); context.registerTagLibrary( getUri(), tagLibrary ); getBody().run(context, output); tagLibrary = null; }
public DynamicTagLibrary(String uri) { this.uri = uri;
public DynamicTagLibrary() {
public DynamicTagLibrary(String uri) { this.uri = uri; }
public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp;
public static Vector calcTDT(Vector chromosomes) { Vector results = new Vector();
public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp; int numMarkers = Chromosome.getSize(); for(int k=0;k<numMarkers;k++){ this.results.add(new TDTResult(Chromosome.getMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ byte allele1T = chrom1T.getGenotype(j); byte allele1U = chrom1U.getGenotype(j); byte allele2T = chrom2T.getGenotype(j); byte allele2U = chrom2U.getGenotype(j); TDTResult curRes = (TDTResult)results.get(j); //System.out.println("marker "+ j + ":\t " + allele1T + "\t" + allele1U + "\t" + allele2T + "\t" + allele2U); curRes.tallyInd(allele1T,allele1U); curRes.tallyInd(allele2T,allele2U); } } for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i); int[][] counts = tempRes.counts; //System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); } return this.results; }
this.results.add(new TDTResult(Chromosome.getMarker(k)));
results.add(new TDTResult(Chromosome.getMarker(k)));
public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp; int numMarkers = Chromosome.getSize(); for(int k=0;k<numMarkers;k++){ this.results.add(new TDTResult(Chromosome.getMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ byte allele1T = chrom1T.getGenotype(j); byte allele1U = chrom1U.getGenotype(j); byte allele2T = chrom2T.getGenotype(j); byte allele2U = chrom2U.getGenotype(j); TDTResult curRes = (TDTResult)results.get(j); //System.out.println("marker "+ j + ":\t " + allele1T + "\t" + allele1U + "\t" + allele2T + "\t" + allele2U); curRes.tallyInd(allele1T,allele1U); curRes.tallyInd(allele2T,allele2U); } } for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i); int[][] counts = tempRes.counts; //System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); } return this.results; }
for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i);
for(int i=0;i<results.size();i++){ TDTResult tempRes = (TDTResult)results.get(i);
public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp; int numMarkers = Chromosome.getSize(); for(int k=0;k<numMarkers;k++){ this.results.add(new TDTResult(Chromosome.getMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ byte allele1T = chrom1T.getGenotype(j); byte allele1U = chrom1U.getGenotype(j); byte allele2T = chrom2T.getGenotype(j); byte allele2U = chrom2U.getGenotype(j); TDTResult curRes = (TDTResult)results.get(j); //System.out.println("marker "+ j + ":\t " + allele1T + "\t" + allele1U + "\t" + allele2T + "\t" + allele2U); curRes.tallyInd(allele1T,allele1U); curRes.tallyInd(allele2T,allele2U); } } for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i); int[][] counts = tempRes.counts; //System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); } return this.results; }
return this.results;
return results;
public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp; int numMarkers = Chromosome.getSize(); for(int k=0;k<numMarkers;k++){ this.results.add(new TDTResult(Chromosome.getMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ byte allele1T = chrom1T.getGenotype(j); byte allele1U = chrom1U.getGenotype(j); byte allele2T = chrom2T.getGenotype(j); byte allele2U = chrom2U.getGenotype(j); TDTResult curRes = (TDTResult)results.get(j); //System.out.println("marker "+ j + ":\t " + allele1T + "\t" + allele1U + "\t" + allele2T + "\t" + allele2U); curRes.tallyInd(allele1T,allele1U); curRes.tallyInd(allele2T,allele2U); } } for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i); int[][] counts = tempRes.counts; //System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); } return this.results; }
getBody().run(context, output);
invokeBody(output);
public void doTag(XMLOutput output) throws Exception { Object obj = getObject(); if ( this.task != null ) { Method method = MethodUtils.getAccessibleMethod( this.task.getClass(), "addText", addTaskParamTypes ); if (method != null) { String text = getBodyText(); Object[] args = { text }; method.invoke(this.task, args); } else { getBody().run(context, output); } this.task.perform(); } else { getBody().run( context, output ); AntTagSupport parent = (AntTagSupport) findAncestorWithClass(AntTagSupport.class); if ( parent != null ) { // otherwise it -must- be a top-level, non-parented datatype. Object parentObj = parent.getObject(); if ( parentObj != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObj.getClass() ); try { ih.storeElement( getAntProject(), parentObj, getObject(), getTagName() ); } catch (Exception e) { } } } } }
String text = expression.getExpressionText(); assertEquals( "Wrong textual representation for expression text: ", expressionText, text);
protected void assertExpression(String expressionText, Object expectedValue) throws Exception { Expression expression = CompositeExpression.parse(expressionText, factory); assertTrue( "Created a valid expression for: " + expressionText, expression != null ); Object value = expression.evaluate(context); //assertEquals( "Expression for: " + expressionText + " is: " + expression, expectedValue, value ); assertEquals( "Wrong result for expression: " + expressionText, expectedValue, value ); }
ctrl.viewChanged( this, PhotoInfoController.QUALITY );
if ( getQuality() != null ) { ctrl.viewChanged( this, PhotoInfoController.QUALITY ); }
public void actionPerformed( ActionEvent evt ) { if ( evt.getActionCommand().equals( "save" ) ) { try { ctrl.save(); } catch ( Exception e ) { log.warn( "exception while saving: " + e.getMessage() ); e.printStackTrace(); } } else if ( evt.getActionCommand().equals( "discard" ) ) { log.debug( "Discarding data" ); ctrl.discard(); } else if ( evt.getSource() == qualityField ) { log.debug( "quality changed" ); ctrl.viewChanged( this, PhotoInfoController.QUALITY ); } }
log.debug( "fuzzyDate = " + str );
public FuzzyDate getFuzzyDate( ) { String str = fuzzyDateField.getText( ); FuzzyDate d = FuzzyDate.parse( str ); return d; }
return photographerField.getText( );
String str = photographerField.getText( ); return str;
public String getPhotographer( ) { return photographerField.getText( ); }
return new Integer( qualityField.getSelectedIndex() );
int q = qualityField.getSelectedIndex(); Integer retval = null; if ( q >= 0 ) { retval = new Integer( q ); } return retval;
public Number getQuality() { return new Integer( qualityField.getSelectedIndex() ); }
ctrl.viewChanged( this, changedField );
Object fieldValue = ctrl.getField( changedField ); if ( fieldValue != null || changedDoc.getLength() > 0 ) { ctrl.viewChanged( this, changedField ); }
public void insertUpdate( DocumentEvent ev ) { Document changedDoc = ev.getDocument(); String changedField = (String) changedDoc.getProperty( FIELD_NAME ); ctrl.viewChanged( this, changedField );// // Handle fuzzy time// if ( changedDoc == fuzzyDateDoc ) {// log.warn( "Fuzzy date entered" );// String fdStr = fuzzyDateField.getText();// FuzzyDate fd = FuzzyDate.parse( fdStr );// // if ( fd != null ) {// // log.warn( "FuzzyDate parsed successfully!!!" );// // shootingDayField.setValue( fd.getDate() );// // timeAccuracyField.setValue( new Double( fd.getAccuracy() ) );// // } // } }