rem (string, 0–477k chars) | add (string, 0–313k chars) | context (string, 6–599k chars)
---|---|---
int[] thisBlock = new int[st.countTokens()]; int x = 0; | Vector goodies = new Vector(); | public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; } |
thisBlock[x] = new Integer(st.nextToken()).intValue()-1; | Integer nextInLine = new Integer(st.nextToken()); for (int y = 0; y < Chromosome.realIndex.length; y++){ if (Chromosome.realIndex[y] == nextInLine.intValue() - 1){ goodies.add(new Integer(y)); } } } int thisBlock[] = new int[goodies.size()]; for (int x = 0; x < goodies.size(); x++){ thisBlock[x] = ((Integer)goodies.elementAt(x)).intValue(); | public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; } |
x++; | public Vector readBlocks(File infile) throws HaploViewException, IOException{ if (!infile.exists()){ throw new HaploViewException("File " + infile.getName() + " doesn't exist!"); } Vector cust = new Vector(); BufferedReader in = new BufferedReader(new FileReader(infile)); String currentLine; int lineCount = 0; int highestYet = -1; while ((currentLine = in.readLine()) != null){ lineCount ++; StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() == 1){ //complain if we have only one col throw new HaploViewException("File error on line " + lineCount + " in " + infile.getName()); }else if (st.countTokens() == 0){ //skip blank lines continue; } try{ int[] thisBlock = new int[st.countTokens()]; int x = 0; while (st.hasMoreTokens()){ //we're being nice to users and letting them input blocks with 1-offset thisBlock[x] = new Integer(st.nextToken()).intValue()-1; if (thisBlock[x] > Chromosome.getSize() || thisBlock[x] < 0){ throw new HaploViewException("Error, marker in block out of bounds: " + thisBlock[x] + "\non line " + lineCount); } if (thisBlock[x] <= highestYet){ throw new HaploViewException("Error, markers/blocks out of order or overlap:\n" + "on line " + lineCount); } highestYet = thisBlock[x]; x++; } cust.add(thisBlock); }catch (NumberFormatException nfe) { throw new HaploViewException("Format error on line " + lineCount + " in " + infile.getName()); } } return cust; } |
|
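The three rows above all modify HaploView's `readBlocks` parser. Below is a minimal self-contained sketch of the pattern that method implements, keeping the 1-offset token conversion and the bounds/ordering checks; `markerCount` stands in for `Chromosome.getSize()` and plain `IOException` for `HaploViewException`, both assumptions made only to keep the example runnable.

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;

public class BlockParser {
    // Parse one block per line; tokens are 1-offset marker numbers.
    public static List<int[]> readBlocks(BufferedReader in, int markerCount) throws IOException {
        List<int[]> blocks = new ArrayList<>();
        String line;
        int lineCount = 0;
        int highestYet = -1;                        // enforces order and non-overlap across blocks
        while ((line = in.readLine()) != null) {
            lineCount++;
            StringTokenizer st = new StringTokenizer(line);
            if (st.countTokens() == 0) continue;    // skip blank lines
            if (st.countTokens() == 1) {            // complain if we have only one column
                throw new IOException("File error on line " + lineCount);
            }
            try {
                int[] block = new int[st.countTokens()];
                for (int x = 0; st.hasMoreTokens(); x++) {
                    block[x] = Integer.parseInt(st.nextToken()) - 1;   // users give 1-offset input
                    if (block[x] < 0 || block[x] >= markerCount) {
                        throw new IOException("Marker out of bounds on line " + lineCount);
                    }
                    if (block[x] <= highestYet) {
                        throw new IOException("Markers/blocks out of order or overlap on line " + lineCount);
                    }
                    highestYet = block[x];
                }
                blocks.add(block);
            } catch (NumberFormatException nfe) {
                throw new IOException("Format error on line " + lineCount);
            }
        }
        return blocks;
    }
}
```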
System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); | System.out.println("Ignoring SQL Exception; assume relationship_test_child didn't exist."); | public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); } |
System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); | System.out.println("Ignoring SQL Exception; assume relationship_test_parent didn't exist."); | public void testReadFromDB() throws Exception { Connection con = db.getConnection(); Statement stmt = null; String lastSQL = null; try { stmt = con.createStatement(); try { stmt.executeUpdate("DROP TABLE relationship_test_child"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } try { stmt.executeUpdate("DROP TABLE relationship_test_parent"); } catch (SQLException ex) { System.out.println("Ignoring SQL Exception; assume object to be dropped didn't exist."); System.out.println(ex.getMessage()); } lastSQL = "CREATE TABLE relationship_test_parent (\n" + " pkcol_1 integer not null,\n" + " pkcol_2 integer not null,\n" + " attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "CREATE TABLE relationship_test_child (\n" + " parent_pkcol_1 integer not null,\n" + " parent_pkcol_2 integer not null,\n" + " child_attribute_1 integer not null)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_parent\n" + " ADD CONSTRAINT relationship_test_pk\n" + " PRIMARY KEY (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); lastSQL = "ALTER TABLE relationship_test_child\n" + " ADD CONSTRAINT relationship_test_fk\n" + " FOREIGN KEY (parent_pkcol_1, parent_pkcol_2)\n" + " REFERENCES relationship_test_parent (pkcol_1 , pkcol_2)"; stmt.executeUpdate(lastSQL); } catch (SQLException ex) { System.out.println("SQL Statement Failed:\n"+lastSQL+"\nStack trace of SQLException follows:"); ex.printStackTrace(); fail("SQL statement failed. See system console for details."); } finally { if (stmt != null) stmt.close(); } SQLTable parent = db.getTableByName("relationship_test_parent"); SQLTable child = db.getTableByName("relationship_test_child"); if (parent == null) { parent = db.getTableByName("relationship_test_parent".toUpperCase()); } SQLRelationship rel = (SQLRelationship) parent.getExportedKeys().get(0); assertEquals("relationship_test_fk", rel.getName().toLowerCase()); assertSame(parent, rel.getPkTable()); assertSame(child, rel.getFkTable()); assertEquals((SQLRelationship.ZERO | SQLRelationship.ONE | SQLRelationship.MANY), rel.getFkCardinality()); assertEquals(SQLRelationship.ONE, rel.getPkCardinality()); } |
if ( answer == null ) { answer = context.getScopedVariable(text); | if ( answer == null ) { answer = context.getVariable(text); } return answer; | public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; } |
return answer; } }; | }; | public Expression createExpression(final String text) throws Exception { final Expression jexlExpression = new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression(text) ); if ( isSupportAntVariables() && isValidAntVariableName(text) ) { ExpressionSupport expr = new ExpressionSupport() { public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } }; return expr; } return jexlExpression; } |
if ( answer == null ) { answer = context.getScopedVariable(text); | if ( answer == null ) { answer = context.getVariable(text); } return answer; | public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } |
return answer; } | public Object evaluate(JellyContext context) { Object answer = jexlExpression.evaluate(context); if ( answer == null ) { answer = context.getScopedVariable(text); } return answer; } |
|
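The rows above swap Jelly's `context.getScopedVariable(text)` for `context.getVariable(text)` as the null fallback when the compiled Jexl expression evaluates to nothing. A minimal sketch of that fallback pattern; `VarContext` and `Expr` are hypothetical stand-ins for `JellyContext` and Jelly's `Expression`.

```java
// If the compiled expression evaluates to null, fall back to looking the raw
// expression text up as a context variable.
interface VarContext {
    Object getVariable(String name);
}

interface Expr {
    Object evaluate(VarContext ctx);
}

final class FallbackExpr implements Expr {
    private final Expr inner;
    private final String text;

    FallbackExpr(Expr inner, String text) {
        this.inner = inner;
        this.text = text;
    }

    public Object evaluate(VarContext ctx) {
        Object answer = inner.evaluate(ctx);
        if (answer == null) {
            answer = ctx.getVariable(text);   // fall back to the literal name
        }
        return answer;
    }
}
```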
Object value = evaluate(context); | Object value = evaluateAsValue(context); | public boolean evaluateAsBoolean(JellyContext context) { Object value = evaluate(context); if ( value instanceof Boolean ) { Boolean b = (Boolean) value; return b.booleanValue(); } else if ( value instanceof String ) { // return Boolean.getBoolean( (String) value ); String str = (String) value; if ( str.equals( "on" ) || str.equals( "yes" ) || str.equals( "1" ) || str.equals( "true" ) ) { return true; } else { return false; } } return false; } |
Object value = evaluate(context); | Object value = evaluateAsValue(context); | public Iterator evaluateAsIterator(JellyContext context) { Object value = evaluate(context); if ( value == null ) { return EMPTY_ITERATOR; } else if ( value instanceof Iterator ) { return (Iterator) value; } else if ( value instanceof List ) { List list = (List) value; return list.iterator(); } else if ( value instanceof Map ) { Map map = (Map) value; return map.entrySet().iterator(); } else if ( value.getClass().isArray() ) { return new ArrayIterator( value ); } else if ( value instanceof Enumeration ) { return new EnumerationIterator((Enumeration ) value); } else if ( value instanceof Collection ) { Collection collection = (Collection) value; return collection.iterator(); } else { // XXX: should we return single iterator? return new SingletonIterator( value ); } } |
Object value = evaluate(context); | Object value = evaluateAsValue(context); | public String evaluateAsString(JellyContext context) { Object value = evaluate(context); if ( value != null ) { return value.toString(); } return null; } |
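The three rows above route `evaluateAsBoolean`, `evaluateAsIterator`, and `evaluateAsString` through `evaluateAsValue` instead of `evaluate`. Separately, `evaluateAsBoolean`'s string coercion accepts exactly four truthy spellings; a small sketch of just that coercion, extracted for clarity:

```java
public class BooleanCoercion {
    // Anything that is neither a Boolean nor one of the truthy strings
    // ("on", "yes", "1", "true") falls through to false.
    static boolean coerceToBoolean(Object value) {
        if (value instanceof Boolean) {
            return ((Boolean) value).booleanValue();
        }
        if (value instanceof String) {
            String str = (String) value;
            return str.equals("on") || str.equals("yes")
                || str.equals("1") || str.equals("true");
        }
        return false;
    }
}
```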
PhotovaultSettings settings = PhotovaultSettings.getSettings(); db = settings.getCurrentDatabase(); db.addVolume( volume ); | public void setUp() { String volumeRoot = "c:\\temp\\photoVaultVolumeTest"; volume = new Volume( "testVolume", volumeRoot ); } |
|
db.removeVolume( volume ); | public void tearDown() { deleteTree( volume.getBaseDir() ); } |
|
db.addVolume( extVolume ); | public void testGetVolumeOfFile() { try { File testVolPath = File.createTempFile( "pv_voltest", "" ); VolumeBase extVolume = new ExternalVolume( "extvol", testVolPath.getAbsolutePath() ); File test1 = new File( volume.getBaseDir(), "testfile" ); assertEquals( test1.getAbsolutePath() + " belongs to volume", volume, VolumeBase.getVolumeOfFile( test1 ) ); File test2 = new File( testVolPath, "testfile" ); assertEquals( test2.getAbsolutePath() + " belongs to volume", extVolume, VolumeBase.getVolumeOfFile( test2 ) ); File test3 = new File( testVolPath.getParentFile(), "testfile" ); this.assertNull( test3.getAbsoluteFile() + " does not belong to volume", VolumeBase.getVolumeOfFile( test3 ) ); // Test that null argument does not cause error VolumeBase.getVolumeOfFile( null ); } catch (IOException ex) { fail( "IOError: " + ex.getMessage() ); } } |
|
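The rows above drop the database registration calls from the Photovault volume tests. For reference, a minimal sketch of the lookup `testGetVolumeOfFile` exercises, where a file belongs to the volume whose base directory is one of its ancestors; `SimpleVolume` and `VolumeRegistry` are hypothetical stand-ins for `VolumeBase` and its registry.

```java
import java.io.File;
import java.util.ArrayList;
import java.util.List;

final class SimpleVolume {
    final String name;
    final File baseDir;
    SimpleVolume(String name, File baseDir) {
        this.name = name;
        this.baseDir = baseDir;
    }
}

final class VolumeRegistry {
    private final List<SimpleVolume> registered = new ArrayList<>();

    void add(SimpleVolume v) { registered.add(v); }

    // Walk the file's ancestor chain and return the first volume whose base
    // directory matches; a null argument or an unmatched file yields null,
    // mirroring the behavior the test asserts.
    SimpleVolume volumeOf(File f) {
        for (File p = f; p != null; p = p.getParentFile()) {
            for (SimpleVolume v : registered) {
                if (v.baseDir.equals(p)) {
                    return v;
                }
            }
        }
        return null;
    }
}
```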
setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); | setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); | public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); } |
JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); | JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); | public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); } |
JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); | JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); | public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); } |
fieldSize = minDisplayField.getPreferredSize(); } | JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); } | public HaplotypeDisplayController(HaplotypeDisplay parent) { this.parent = parent; setLayout(new BoxLayout(this, BoxLayout.Y_AXIS)); JPanel hapPercentPanel = new JPanel(); hapPercentPanel.add(new JLabel("Examine haplotypes above ")); hapPercentPanel.add(minDisplayField = new NumberTextField(String.valueOf(parent.displayThresh), 3)); minDisplayField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setDisplayThresh(Integer.parseInt(minDisplayField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); hapPercentPanel.add(new JLabel("%")); add(hapPercentPanel); JPanel thinPanel = new JPanel(); thinPanel.add(new JLabel("Connect with thin lines if > ")); thinPanel.add(minThinField = new NumberTextField(String.valueOf(parent.thinThresh), 3)); minThinField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThinThresh(Integer.parseInt(minThinField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thinPanel.add(new JLabel("%")); add(thinPanel); JPanel thickPanel = new JPanel(); thickPanel.add(new JLabel("Connect with thick lines if > ")); thickPanel.add(minThickField = new NumberTextField(String.valueOf(parent.thickThresh), 3)); minThickField.getDocument().addDocumentListener(new DocumentListener() { public void insertUpdate(DocumentEvent e) { setThickThresh(Integer.parseInt(minThickField.getText())); } public void changedUpdate(DocumentEvent e) { } public void removeUpdate(DocumentEvent e) { } }); thickPanel.add(new JLabel("%")); add(thickPanel); fieldSize = minDisplayField.getPreferredSize(); } |
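The four rows above shuffle three near-identical threshold panels in `HaplotypeDisplayController`. A sketch of a factory that could build one such row; `JTextField` replaces the original `NumberTextField`, and the `IntConsumer` wiring is an assumption for illustration, not the project's API.

```java
import java.util.function.IntConsumer;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;

final class ThresholdRow {
    // Builds one "label + numeric field + %" row and pushes parsed edits to
    // onChange, like each hand-rolled panel in the constructor above.
    static JPanel make(String label, int initial, IntConsumer onChange) {
        JPanel row = new JPanel();
        row.add(new JLabel(label));
        final JTextField field = new JTextField(String.valueOf(initial), 3);
        field.getDocument().addDocumentListener(new DocumentListener() {
            public void insertUpdate(DocumentEvent e) {
                onChange.accept(Integer.parseInt(field.getText()));
            }
            public void changedUpdate(DocumentEvent e) { }
            public void removeUpdate(DocumentEvent e) { }
        });
        row.add(field);
        row.add(new JLabel("%"));
        return row;
    }
}
```

Usage in the constructor would then reduce to one line per panel, e.g. `add(ThresholdRow.make("Examine haplotypes above ", parent.displayThresh, this::setDisplayThresh));`.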
*/ | public static JellyInterpreter getInterpreter(JellyContext context) throws EvalError { JellyInterpreter interpreter = (JellyInterpreter) context.getVariable( "org.apache.commons.jelly.beanshell.JellyInterpreter" ); if ( interpreter == null ) { interpreter = new JellyInterpreter(); interpreter.setJellyContext(context); context.setVariable( "org.apache.commons.jelly.beanshell.JellyInterpreter", interpreter ); } return interpreter; } |
|
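The row above shows `getInterpreter` caching one `JellyInterpreter` per context under a well-known variable name. A minimal sketch of that per-context singleton pattern; `MiniContext` is a hypothetical stand-in for `JellyContext`'s variable store.

```java
import java.util.HashMap;
import java.util.Map;

final class MiniContext {
    private final Map<String, Object> vars = new HashMap<>();
    Object getVariable(String key) { return vars.get(key); }
    void setVariable(String key, Object value) { vars.put(key, value); }
}

final class InterpreterCache {
    private static final String KEY =
        "org.apache.commons.jelly.beanshell.JellyInterpreter";

    // Lazily create and cache one interpreter per context, keyed by a
    // well-known variable name, as getInterpreter above does.
    static Object getInterpreter(MiniContext context) {
        Object interpreter = context.getVariable(KEY);
        if (interpreter == null) {
            interpreter = new Object();   // stand-in for new JellyInterpreter()
            context.setVariable(KEY, interpreter);
        }
        return interpreter;
    }
}
```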
logger.debug("Starting export of tables: "+tablesToExport); | public void export(List<SQLTable> tablesToExport) throws SQLException, ArchitectException { finished = false; hasStarted = true; Connection con = null; try { currentDB = tablesToExport; // first, set the logWriter logWriter = new LogWriter(ArchitectSession.getInstance().getUserSettings().getETLUserSettings().getString( ETLUserSettings.PROP_ETL_LOG_PATH, "")); SQLDatabase repository = new SQLDatabase(repositoryDataSource); // we // are // exporting // db // into // this con = repository.getConnection(); try { defParam = new DefaultParameters(con); } catch (PLSchemaException p) { throw new ArchitectException("couldn't load default parameters", p); } SQLDatabase target = new SQLDatabase(targetDataSource); Connection tCon = target.getConnection(); exportResultList.add(new LabelValueBean("\n Creating Power Loader Job", "\n")); sm = null; for (int tryNum = 0; tryNum < 3 && sm == null; tryNum++) { String username; if (tryNum == 1) { username = repositoryDataSource.get(ArchitectDataSource.PL_UID).toUpperCase(); } else if (tryNum == 2) { username = repositoryDataSource.get(ArchitectDataSource.PL_UID).toLowerCase(); } else { username = repositoryDataSource.get(ArchitectDataSource.PL_UID); } try { // don't need to verify passwords in client apps (as opposed // to webapps) sm = new PLSecurityManager(con, username, repositoryDataSource.get(ArchitectDataSource.PL_PWD), false); } catch (PLSecurityException se) { logger.debug("Couldn't find pl user " + username, se); } } if (sm == null) { throw new ArchitectException("Could not find login for: " + repositoryDataSource.get(ArchitectDataSource.PL_UID)); } logWriter.info("Starting creation of job <" + jobId + "> at " + new java.util.Date(System.currentTimeMillis())); logWriter.info("Connected to database: " + repositoryDataSource.toString()); maybeInsertFolder(con); PLJob job = new PLJob(jobId); insertJob(con); insertFolderDetail(con, job.getObjectType(), job.getObjectName()); // This will order the target tables so that the parent tables are // loaded // before their children DepthFirstSearch targetDFS = new DepthFirstSearch(tablesToExport); List tables = targetDFS.getFinishOrder(); if (logger.isDebugEnabled()) { StringBuffer tableOrder = new StringBuffer(); Iterator dit = tables.iterator(); while (dit.hasNext()) { tableOrder.append(((SQLTable) dit.next()).getName()).append(", "); } logger.debug("Safe load order for job is: " + tableOrder); } int outputTableNum = 1; Hashtable inputTables = new Hashtable(); Iterator targetTableIt = tables.iterator(); while (targetTableIt.hasNext()) { tableCount++; SQLTable outputTable = (SQLTable) targetTableIt.next(); // reset loop variables for each output table boolean createdOutputTableMetaData = false; int transNum = 0; int seqNum = 1; // borrowed from insertColumnMappings, not sure // if this is significant... 
String transName = null; String outputTableId = null; String inputTableId = null; // Iterator cols = outputTable.getColumns().iterator(); while (cols.hasNext()) { SQLColumn outputCol = (SQLColumn) cols.next(); SQLColumn inputCol = outputCol.getSourceColumn(); if (inputCol != null && !inputTables.keySet().contains(inputCol.getParentTable())) { // create transaction and input table meta data here if // we need to SQLTable inputTable = inputCol.getParentTable(); String baseTransName = PLUtils.toPLIdentifier("LOAD_" + outputTable.getName()); transNum = generateUniqueTransIdx(con, baseTransName); transName = baseTransName + "_" + transNum; logger.debug("transName: " + transName); insertTrans(con, transName, outputTable.getRemarks()); insertFolderDetail(con, "TRANSACTION", transName); insertTransExceptHandler(con, "A", transName, tCon); // error // handling // is // w.r.t. // target // database insertTransExceptHandler(con, "U", transName, tCon); // error // handling // is // w.r.t. // target // database insertTransExceptHandler(con, "D", transName, tCon); // error // handling // is // w.r.t. // target // database insertJobDetail(con, outputTableNum * 10, "TRANSACTION", transName); inputTableId = PLUtils.toPLIdentifier(inputTable.getName() + "_IN_" + transNum); logger.debug("inputTableId: " + inputTableId); insertTransTableFile(con, transName, inputTableId, inputTable, false, transNum); inputTables.put(inputTable, new PLTransaction(transName, inputTableId, transNum)); } else { // restore input/transaction variables PLTransaction plt = (PLTransaction) inputTables.get(inputCol.getParentTable()); transName = plt.getName(); inputTableId = plt.getInputTableId(); transNum = plt.getTransNum(); } if (!createdOutputTableMetaData) { // create output table meta data once logger.debug("outputTableNum: " + outputTableNum); outputTableId = PLUtils.toPLIdentifier(outputTable.getName() + "_OUT_" + outputTableNum); logger.debug("outputTableId: " + outputTableId); insertTransTableFile(con, transName, outputTableId, outputTable, true, transNum); createdOutputTableMetaData = true; } // finally, insert the mapping for this column if (inputCol != null) { // note: output proc seq num appears to be meaningless // based on what the Power Loader // does after you view generated transaction in the VB // Front end. insertTransColMap(con, transName, outputTableId, outputCol, inputTableId, seqNum * 10); } seqNum++; } outputTableNum++; // moved out of inner loop } } finally { hasStarted = false; finished = true; currentDB = null; // close and flush the logWriter (and set the reference to null) logWriter.flush(); logWriter.close(); logWriter = null; try { if (con != null) con.close(); } catch (SQLException e) { logger.error("Couldn't close connection", e); } } } |
|
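The export row above relies on `DepthFirstSearch.getFinishOrder()` to load parent tables before the children whose foreign keys reference them. A simplified sketch of one way to produce such an order, using a post-order DFS over the reference edges and assuming the FK graph is acyclic; `Table` is a hypothetical stand-in for `SQLTable`.

```java
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

final class Table {
    final String name;
    final List<Table> references = new ArrayList<>();   // tables this one's FKs point at
    Table(String name) { this.name = name; }
}

final class LoadOrder {
    // Emit every referenced (parent) table before the table that references
    // it, giving a safe load order. Assumes no FK cycles.
    static List<Table> finishOrder(List<Table> tables) {
        Set<Table> done = new LinkedHashSet<>();
        for (Table t : tables) {
            visit(t, done);
        }
        return new ArrayList<>(done);
    }

    private static void visit(Table t, Set<Table> done) {
        if (done.contains(t)) {
            return;
        }
        for (Table parent : t.references) {
            visit(parent, done);      // parents first...
        }
        done.add(t);                  // ...then this table
    }
}
```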
pp.setMouseMode(PlayPen.mouseModeType.CREATING_TABLE); | pp.setMouseMode(PlayPen.MouseModeType.CREATING_TABLE); | public void actionPerformed(ActionEvent evt) { SQLTable t = new SQLTable(); try { t.initFolders(true); } catch (ArchitectException e) { logger.error("Couldn't add folder to table \""+t.getName()+"\"", e); JOptionPane.showMessageDialog(null, "Failed to add folder to table:\n"+e.getMessage()); } t.setName("New_Table"); TablePane tp = new TablePane(t, pp); pp.addFloating(tp); pp.setMouseMode(PlayPen.mouseModeType.CREATING_TABLE); } |
"<param name=\"remoteURL\" value=\"http: | "<param name=\"remoteURL\" value=\""+context.getServerPath()+"/app/fetchAttributeValues.do;jsessionid={2}\"></param>").append( | public String draw(DashboardContext context) { ApplicationConfig appConfig = context.getWebContext().getApplicationConfig(); assert appConfig != null: "No application context present"; try{ StringBuffer graphComponent = new StringBuffer().append( "<applet code=\"org/jmanage/webui/applets/GraphApplet.class\"").append( " width=\"{0}\" height=\"{1}\"").append( " archive=\"/applets/applets.jar,/applets/jfreechart-0.9.20.jar,").append( "/applets/jcommon-0.9.5.jar\" >").append( "<param name=\"graphTitle\" value=\""+getName()+"\"></param>").append( "<param name=\"pollingInterval\" value=\""+getPollingIntervalInSeconds()+"\"></param>").append( "<param name=\"remoteURL\" value=\"http://localhost:9090/app/fetchAttributeValues.do;jsessionid={2}\"></param>").append( "<param name=\"displayNames\" value=\"").append(getAttributeDisplayNamesForGraph()).append("\"></param>").append( "<param name=\"attributes\" value=\"").append(getAttributesForGraph(appConfig.getName())).append("\"></param>").append( "<param value=\"\" name=\"yAxisLabel\"></param>").append("</applet>"); return graphComponent.toString(); }catch(Exception e){ return "Failure rendering component"; } } |
EM(Vector chromosomes, int numTrios){ | EM(Vector chromosomes, int numTrios, Vector extraInds){ | EM(Vector chromosomes, int numTrios){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios; } |
this.numTrios = numTrios; | this.numTrios = numTrios + extraTrioCount; | EM(Vector chromosomes, int numTrios){ //we need to add extra copies of haploid chromosomes so we add a second copy this.chromosomes = new Vector(); for(int i=0;i<chromosomes.size();i++) { this.chromosomes.add(chromosomes.elementAt(i)); if(((Chromosome)this.chromosomes.lastElement()).isHaploid()){ this.chromosomes.add(chromosomes.elementAt(i)); } } this.numTrios = numTrios; } |
updatedExtraTrioCount = 0; | public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = 
false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); } |
|
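The EM rows above duplicate haploid chromosomes in the constructor so the rest of the code can always consume chromosomes in homologous pairs. A minimal sketch of that padding step; `Chrom` is a hypothetical stand-in for `Chromosome`, of which only the haploid flag matters here.

```java
import java.util.ArrayList;
import java.util.List;

final class Chrom {
    final boolean haploid;
    Chrom(boolean haploid) { this.haploid = haploid; }
}

final class HaploidPadding {
    // Store each haploid chromosome twice so downstream pair-wise loops
    // (which step through chromosomes two at a time) stay uniform.
    static List<Chrom> pad(List<Chrom> input) {
        List<Chrom> out = new ArrayList<>();
        for (Chrom c : input) {
            out.add(c);
            if (c.haploid) {
                out.add(c);   // second copy completes the pair
            }
        }
        return out;
    }
}
```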
} else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { | if (i/4 < extraTrioCount){ updatedExtraTrioCount++; } } else if((!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) && i/4 >= extraTrioCount ) { | public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } 
for (int i = numTrios*4; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); } |
else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { | else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3 && i/4 >= extraTrioCount){ | public void doEM(int[] theBlock) throws HaploViewException{ //break up large blocks if needed int[] block_size; if (theBlock.length < 9){ block_size = new int[1]; block_size[0] = theBlock.length; } else { //some base-8 arithmetic int ones = theBlock.length%8; int eights = (theBlock.length - ones)/8; if (ones == 0){ block_size = new int[eights]; for (int i = 0; i < eights; i++){ block_size[i]=8; } } else { block_size = new int[eights+1]; for (int i = 0; i < eights-1; i++){ block_size[i]=8; } block_size[eights-1] = (8+ones)/2; block_size[eights] = 8+ones-block_size[eights-1]; } } byte[] thisHap; Vector inputHaploSingletons = new Vector(); Vector inputHaploTrios = new Vector(); Vector affSingletons = new Vector(); Vector affTrios = new Vector(); Vector affKids = new Vector(); Vector haploidTrios = new Vector(); Vector haploidSingletons = new Vector(); //whichVector[i] stores a value which indicates which vector chromosome i's genotype should go in //1 indicates inputHaploSingletons (singletons), 2 indicates inputHaploTrios, //3 indicates a person from a broken trio who is treated as a singleton //0 indicates none (too much missing data) int[] whichVector = new int[chromosomes.size()]; for(int i=0;i<numTrios*4; i+=4) { Chromosome parentAFirst = (Chromosome) chromosomes.elementAt(i); Chromosome parentASecond = (Chromosome) chromosomes.elementAt(i+1); Chromosome parentBFirst = (Chromosome) chromosomes.elementAt(i+2); Chromosome parentBSecond = (Chromosome) chromosomes.elementAt(i+3); boolean tooManyMissingInASegmentA = false; boolean tooManyMissingInASegmentB = false; int totalMissingA = 0; int totalMissingB = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missingA = 0; int missingB = 0; for (int j = 0; j < block_size[n]; j++){ byte AFirstGeno = parentAFirst.getGenotype(theBlock[segmentShift+j]); byte ASecondGeno = parentASecond.getGenotype(theBlock[segmentShift+j]); byte BFirstGeno = parentBFirst.getGenotype(theBlock[segmentShift+j]); byte BSecondGeno = parentBSecond.getGenotype(theBlock[segmentShift+j]); if(AFirstGeno == 0 || ASecondGeno == 0) missingA++; if(BFirstGeno == 0 || BSecondGeno == 0) missingB++; } segmentShift += block_size[n]; if (missingA >= MISSINGLIMIT){ tooManyMissingInASegmentA = true; } if (missingB >= MISSINGLIMIT){ tooManyMissingInASegmentB = true; } totalMissingA += missingA; totalMissingB += missingB; } if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3 && !tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //both parents are good so all 4 chroms are added as a trio whichVector[i] = 2; whichVector[i+1] = 2; whichVector[i+2] = 2; whichVector[i+3] = 2; } else if(!tooManyMissingInASegmentA && totalMissingA <= 1+theBlock.length/3) { //first person good, so he's added as a singleton, other parent is dropped whichVector[i] = 3; whichVector[i+1] =3; whichVector[i+2] =0; whichVector[i+3]=0; } else if(!tooManyMissingInASegmentB && totalMissingB <= 1+theBlock.length/3) { //second person good, so he's added as a singleton, other parent is dropped whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =3; whichVector[i+3]=3; } else { //both people have too much missing data so neither is used whichVector[i] = 0; whichVector[i+1] =0; whichVector[i+2] =0; whichVector[i+3]=0; } } for (int i = numTrios*4; i < chromosomes.size(); i++){ 
Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); Chromosome nextChrom = (Chromosome)chromosomes.elementAt(++i); boolean tooManyMissingInASegment = false; int totalMissing = 0; int segmentShift = 0; for (int n = 0; n < block_size.length; n++){ int missing = 0; for (int j = 0; j < block_size[n]; j++){ byte theGeno = thisChrom.getGenotype(theBlock[segmentShift+j]); byte nextGeno = nextChrom.getGenotype(theBlock[segmentShift+j]); if(theGeno == 0 || nextGeno == 0) missing++; } segmentShift += block_size[n]; if (missing >= MISSINGLIMIT){ tooManyMissingInASegment = true; } totalMissing += missing; } //we want to use chromosomes without too many missing genotypes in a given //subsegment (first term) or without too many missing genotypes in the //whole block (second term) if (!tooManyMissingInASegment && totalMissing <= 1+theBlock.length/3){ whichVector[i-1] = 1; whichVector[i] = 1; } } //we only want to add an affected status every other chromosome, so we flip this boolean each time boolean addAff = true; for (int i = 0; i < chromosomes.size(); i++){ Chromosome thisChrom = (Chromosome)chromosomes.elementAt(i); if(whichVector[i] > 0) { thisHap = new byte[theBlock.length]; for (int j = 0; j < theBlock.length; j++){ byte a1 = Chromosome.getMarker(theBlock[j]).getMajor(); byte a2 = Chromosome.getMarker(theBlock[j]).getMinor(); byte theGeno = thisChrom.getGenotype(theBlock[j]); if (theGeno >= 5){ thisHap[j] = 'h'; } else { if (theGeno == 0){ thisHap[j] = '0'; }else if (theGeno == a1){ thisHap[j] = '1'; }else if (theGeno == a2){ thisHap[j] = '2'; }else{ throw new HaploViewException("Marker with > 2 alleles: " + Chromosome.getMarker(theBlock[j]).getDisplayName()); } } } if(whichVector[i] == 1) { inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(thisChrom.getAffected())); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } else if(whichVector[i] ==2) { inputHaploTrios.add(thisHap); if(addAff) { affTrios.add(new Integer(thisChrom.getAffected())); affKids.add(thisChrom.getKidAffected()); haploidTrios.add(new Boolean(thisChrom.isHaploid())); } }else if (whichVector[i] == 3){ inputHaploSingletons.add(thisHap); if(addAff) { affSingletons.add(new Integer(0)); haploidSingletons.add(new Boolean(thisChrom.isHaploid())); } } if(addAff) { addAff = false; } else { addAff =true; } } } numFilteredTrios = inputHaploTrios.size() / 4; inputHaploTrios.addAll(inputHaploSingletons); affTrios.addAll(affSingletons); byte[][] input_haplos = (byte[][])inputHaploTrios.toArray(new byte[0][0]); haploidTrios.addAll(haploidSingletons); haploid = new boolean[haploidTrios.size()]; for(int i=0;i<haploidTrios.size();i++){ haploid[i] = ((Boolean)haploidTrios.elementAt(i)).booleanValue(); } full_em_breakup(input_haplos, block_size, affTrios, affKids); } |
if (data[i].nposs==1) { | if (data[i].nposs==1 && i >= updatedExtraTrioCount*2) { | private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. 
heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += 
superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; } |
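Note: this row (and the three that follow) adds the guard `i >= updatedExtraTrioCount*2` so that entries apparently synthesized from trios, which seem to occupy the first 2*updatedExtraTrioCount slots of data[], are excluded when phase-known individuals seed the starting haplotype frequencies. That slot layout is inferred from the guard, not stated in the row. A minimal runnable sketch of the guard pattern, with hypothetical names:

    public class TrioGuardDemo {
        public static void main(String[] args) {
            // Hypothetical layout inferred from the guard: the first
            // 2 * updatedExtraTrioCount slots hold trio-derived pseudo-individuals.
            int updatedExtraTrioCount = 2;
            boolean[] phaseKnown = {true, true, true, false, true, true};
            int tallied = 0;
            for (int i = 0; i < phaseKnown.length; i++) {
                if (phaseKnown[i] && i >= updatedExtraTrioCount * 2) {
                    tallied++;  // only slots 4 and 5 can contribute counts
                }
            }
            System.out.println(tallied);  // prints 2, not 4
        }
    }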
for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; | if(i >= updatedExtraTrioCount*2){ for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } | private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. 
heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += 
superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; } |
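Note: the same `i >= updatedExtraTrioCount*2` guard is wrapped around the per-block EM re-estimation loop here, so the excluded trio-derived entries also stay out of the updated haplotype counts, not just the initial seeding.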
if (superdata[i].nsuper==1) { | if (superdata[i].nsuper==1 && i >= updatedExtraTrioCount*2) { | private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly possible ones (i.e. 
heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += 
superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; } |
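Note: this row applies the identical guard to the seeding of the ligated (super-haplotype) EM, the second stage of the block-by-block estimator in full_em_breakup.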
for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; | if(i >= updatedExtraTrioCount*2){ for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); }else{ total += superdata[i].superposs[k].p; } | private void full_em_breakup( byte[][] input_haplos, int[] block_size, Vector affStatus, Vector kidAffStatus) throws HaploViewException{ int num_poss, iter; double total = 0; int block, start_locus, end_locus, biggest_block_size; int num_indivs = 0; int num_blocks = block_size.length; int num_haplos = input_haplos.length; int num_loci = input_haplos[0].length; Recovery tempRec; if (num_loci > MAXLOCI){ throw new HaploViewException("Too many loci in a single block (> "+MAXLOCI+" non-redundant)"); } //figure out the size of the biggest block biggest_block_size=block_size[0]; for (int i=1; i<num_blocks; i++) { if (block_size[i] > biggest_block_size) biggest_block_size=block_size[i]; } num_poss = two_n[biggest_block_size]; data = new OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) data[i]= new OBS(); superdata = new SUPER_OBS[num_haplos/2]; for (int i=0; i<num_haplos/2; i++) superdata[i]= new SUPER_OBS(num_blocks); double[][] hprob = new double[num_blocks][num_poss]; int[][] hlist = new int[num_blocks][num_poss]; int[] num_hlist = new int[num_blocks]; int[] hint = new int[num_poss]; MapWrap probMap = new MapWrap(PSEUDOCOUNT); /* for trio option */ if (Options.getAssocTest() == ASSOC_TRIO) { ambighet = new int[(num_haplos/4)][num_loci]; store_dhet_status(num_haplos,num_loci,input_haplos); } end_locus=-1; //now we loop through the blocks for (block=0; block<num_blocks; block++) { start_locus=end_locus+1; end_locus=start_locus+block_size[block]-1; num_poss=two_n[block_size[block]]; //read_observations initializes the values in data[] (array of OBS) num_indivs=read_observations(num_haplos,num_loci,input_haplos,start_locus,end_locus); total=(double)num_poss; total *= PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (data[i].nposs==1) { tempRec = (Recovery)data[i].poss.elementAt(0); probMap.put(new Long(tempRec.h1), probMap.get(new Long(tempRec.h1)) + 1.0); if (!haploid[i]){ probMap.put(new Long(tempRec.h2), probMap.get(new Long(tempRec.h2)) + 1.0); total+=2.0; }else{ total+=1.0; } } } probMap.normalize(total); // EM LOOP: assign ambiguous data based on p, then re-estimate p iter=0; while (iter<20) { // compute probabilities of each possible observation for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); if(haploid[i]){ if (tempRec.h1 == tempRec.h2){ //for haploids we only consider reconstructions where both chroms are equal, //since those are the only truly 
possible ones (i.e. heterozygous reconstructions //are mistakes) tempRec.p = (float)(probMap.get(new Long(tempRec.h1))); }else{ tempRec.p = 0; } }else { tempRec.p = (float)(probMap.get(new Long(tempRec.h1))*probMap.get(new Long(tempRec.h2))); } total+=tempRec.p; } // normalize for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); tempRec.p /= total; } } // re-estimate prob probMap = new MapWrap(1e-10); total=num_poss*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<data[i].nposs; k++) { tempRec = (Recovery) data[i].poss.elementAt(k); probMap.put(new Long(tempRec.h1),probMap.get(new Long(tempRec.h1)) + tempRec.p); if (!haploid[i]){ probMap.put(new Long(tempRec.h2),probMap.get(new Long(tempRec.h2)) + tempRec.p); total+=(2.0*(tempRec.p)); }else{ total += tempRec.p; } } } probMap.normalize(total); iter++; } int m=0; for(long j=0;j<num_poss; j++){ hint[(int)j]=-1; if (probMap.get(new Long(j)) > .001) { // printf("haplo %s p = %.4lf\n",haplo_str(j,block_size[block]),prob[j]); hlist[block][m]=(int)j; hprob[block][m]=probMap.get(new Long(j)); hint[(int)j]=m; m++; } } num_hlist[block]=m; // store current block results in super obs structure store_block_haplos(hlist, hprob, hint, block, num_indivs); } /* for each block */ double poss_full=1; for (block=0; block<num_blocks; block++) { poss_full *= num_hlist[block]; } /* LIGATE and finish this mess :) */ fullProbMap = new MapWrap(PSEUDOCOUNT); create_super_haplos(num_indivs,num_blocks,num_hlist); /* run standard EM on supercombos */ /* start prob array with probabilities from full observations */ total = poss_full * PSEUDOCOUNT; /* starting prob is phase known haps + 0.1 (PSEUDOCOUNT) count of every haplotype - i.e., flat when nothing is known, close to phase known if a great deal is known */ for (int i=0; i<num_indivs; i++) { if (superdata[i].nsuper==1) { Long h1 = new Long(superdata[i].superposs[0].h1); Long h2 = new Long(superdata[i].superposs[0].h2); fullProbMap.put(h1,fullProbMap.get(h1) +1.0); if (!haploid[i]){ fullProbMap.put(h2,fullProbMap.get(h2) +1.0); total+=2.0; }else{ total+=1.0; } } } fullProbMap.normalize(total); /* EM LOOP: assign ambiguous data based on p, then re-estimate p */ iter=0; while (iter<20) { /* compute probabilities of each possible observation */ for (int i=0; i<num_indivs; i++) { total=0.0; for (int k=0; k<superdata[i].nsuper; k++) { if(haploid[i]){ if (superdata[i].superposs[k].h1 == superdata[i].superposs[k].h2){ //only consider reconstructions of haploid chromosomes where h1 == h2 //since heterozygous reconstructions aren't possible for haploids superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))); }else{ superdata[i].superposs[k].p = 0; } }else{ superdata[i].superposs[k].p = (float) (fullProbMap.get(new Long(superdata[i].superposs[k].h1))* fullProbMap.get(new Long(superdata[i].superposs[k].h2))); } total+=superdata[i].superposs[k].p; } /* normalize */ for (int k=0; k<superdata[i].nsuper; k++) { superdata[i].superposs[k].p /= total; } } /* re-estimate prob */ fullProbMap = new MapWrap(1e-10); total=poss_full*1e-10; for (int i=0; i<num_indivs; i++) { for (int k=0; k<superdata[i].nsuper; k++) { fullProbMap.put(new Long(superdata[i].superposs[k].h1),fullProbMap.get(new Long(superdata[i].superposs[k].h1)) + superdata[i].superposs[k].p); if(!haploid[i]){ fullProbMap.put(new Long(superdata[i].superposs[k].h2),fullProbMap.get(new Long(superdata[i].superposs[k].h2)) + superdata[i].superposs[k].p); total+=(2.0*superdata[i].superposs[k].p); 
}else{ total += superdata[i].superposs[k].p; } } } fullProbMap.normalize(total); iter++; } /* we're done - the indices of superprob now have to be decoded to reveal the actual haplotypes they represent */ if(Options.getAssocTest() == ASSOC_TRIO) { kidConsistentCache = new boolean[numFilteredTrios][][]; for(int i=0;i<numFilteredTrios*2;i+=2) { if (((Integer)kidAffStatus.elementAt(i)).intValue() == 2){ kidConsistentCache[i/2] = new boolean[superdata[i].nsuper][]; for (int n=0; n<superdata[i].nsuper; n++) { kidConsistentCache[i/2][n] = new boolean[superdata[i+1].nsuper]; for (int m=0; m<superdata[i+1].nsuper; m++) { kidConsistentCache[i/2][n][m] = kid_consistent(superdata[i].superposs[n].h1, superdata[i+1].superposs[m].h1,num_blocks, block_size,hlist,num_hlist,i/2,num_loci); } } } } } realAffectedStatus = affStatus; realKidAffectedStatus = kidAffStatus; doAssociationTests(affStatus, null,null, kidAffStatus); Vector haplos_present = new Vector(); Vector haplo_freq= new Vector(); ArrayList keys = new ArrayList(fullProbMap.theMap.keySet()); Collections.sort(keys); Iterator kitr = keys.iterator(); while(kitr.hasNext()) { Object key = kitr.next(); long keyLong = ((Long)key).longValue(); if(fullProbMap.get(key) > .001) { haplos_present.addElement(decode_haplo_str(keyLong,num_blocks,block_size,hlist,num_hlist)); haplo_freq.addElement(new Double(fullProbMap.get(key))); } } double[] freqs = new double[haplo_freq.size()]; for(int j=0;j<haplo_freq.size();j++) { freqs[j] = ((Double)haplo_freq.elementAt(j)).doubleValue(); } this.haplotypes = (int[][])haplos_present.toArray(new int[0][0]); this.frequencies = freqs; /* if (dump_phased_haplos) { if ((fpdump=fopen("emphased.haps","w"))!=NULL) { for (i=0; i<num_indivs; i++) { best=0; for (k=0; k<superdata[i].nsuper; k++) { if (superdata[i].superposs[k].p > superdata[i].superposs[best].p) { best=k; } } h1 = superdata[i].superposs[best].h1; h2 = superdata[i].superposs[best].h2; fprintf(fpdump,"%s\n",decode_haplo_str(h1,num_blocks,block_size,hlist,num_hlist)); fprintf(fpdump,"%s\n",decode_haplo_str(h2,num_blocks,block_size,hlist,num_hlist)); } fclose(fpdump); } } */ //return 0; } |
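Note: the fourth application of the guard, on the super-haplotype re-estimation accumulator. Taken together, the four rows keep the first updatedExtraTrioCount*2 index slots out of every frequency tally in full_em_breakup; those entries are still phased (the probability-computation and normalization loops remain unguarded) but no longer contribute counts.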
for (int j = 0; j < finishedHaplos[i].length; j++){ | for (int j = 0; j < markerNums.length; j++){ | public void saveHapsToText(Haplotype[][] finishedHaplos, File saveHapsFile) throws IOException{ NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); //open file for saving haps text FileWriter saveHapsWriter = new FileWriter(saveHapsFile); int[][]lookupPos = new int[finishedHaplos.length][]; for (int p = 0; p < lookupPos.length; p++){ lookupPos[p] = new int[finishedHaplos[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][finishedHaplos[p][q].getListOrder()] = q; //System.out.println(p + " " + q + " " + finishedHaplos[p][q].getListOrder()); } } //go through each block and print haplos for (int i = 0; i < finishedHaplos.length; i++){ //write block header saveHapsWriter.write("BLOCK " + (i+1) + ". MARKERS:"); int[] markerNums = finishedHaplos[i][0].getMarkers(); for (int j = 0; j < finishedHaplos[i].length; j++){ saveHapsWriter.write(" " + (markerNums[j]+1)); } saveHapsWriter.write("\n"); //write haps and crossover percentages for (int j = 0; j < finishedHaplos[i].length; j++){ int curHapNum = lookupPos[i][j]; String theHap = new String(); int[] theGeno = finishedHaplos[i][curHapNum].getGeno(); for (int k = 0; k < theGeno.length; k++){ theHap += theGeno[k]; } saveHapsWriter.write(theHap + " (" + nf.format(finishedHaplos[i][curHapNum].getPercentage()) + ")"); if (i < finishedHaplos.length-1){ saveHapsWriter.write("\t|"); for (int crossCount = 0; crossCount < finishedHaplos[i+1].length; crossCount++){ if (crossCount != 0) saveHapsWriter.write("\t"); saveHapsWriter.write(nf.format(finishedHaplos[i][curHapNum].getCrossover(crossCount))); } saveHapsWriter.write("|"); } saveHapsWriter.write("\n"); } saveHapsWriter.write("\n"); } saveHapsWriter.close(); } |
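Note: a loop-bound fix in saveHapsToText. The block header prints one entry per marker, but the old bound iterated over the number of haplotypes in the block, which only coincides with the marker count by accident. A small runnable illustration of why the old bound truncates the header:

    public class HeaderBoundDemo {
        public static void main(String[] args) {
            int[] markerNums = {4, 5, 6};   // three markers in the block
            int haplotypeCount = 2;         // but only two haplotypes survive filtering
            StringBuilder header = new StringBuilder("MARKERS:");
            // The old bound (haplotypeCount) would stop after two entries;
            // the marker count is the right bound for the block header.
            for (int j = 0; j < markerNums.length; j++) {
                header.append(' ').append(markerNums[j] + 1);
            }
            System.out.println(header);  // MARKERS: 5 6 7
        }
    }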
Runtime.getRuntime().addShutdownHook(new Thread(){ public void run(){ logger.info("jManage shutting down..."); DBUtils.shutdownDB(); } }); | public static void main(String[] args) throws Exception{ if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } /* create logs dir */ new File(CoreUtils.getLogDir()).mkdirs(); UserManager userManager = UserManager.getInstance(); User user = null; char[] password = null; int invalidAttempts = 0; if(args.length == 1){ password = args[0].toCharArray(); user = userManager.verifyUsernamePassword( AuthConstants.USER_ADMIN, password); /* invalid password was tried */ if(user == null){ invalidAttempts ++; } } while(user == null){ if(invalidAttempts > 0){ System.out.println("Invalid Admin Password."); } /* get the password */ password = PasswordField.getPassword("Enter password:"); /* the password should match for the admin user */ user = userManager.verifyUsernamePassword( AuthConstants.USER_ADMIN, password); invalidAttempts ++; if(invalidAttempts >= 3){ break; } } /* exit if the admin password is still invalid */ if(user == null){ System.out.println("Number of invalid attempts exceeded. Exiting !"); return; } /* set admin password as the stop key */ final JettyStopKey stopKey = new JettyStopKey(new String(password)); System.setProperty("STOP.KEY", stopKey.toString()); /* set stop.port */ System.setProperty("STOP.PORT", JManageProperties.getStopPort()); /* initialize ServiceFactory */ ServiceFactory.init(ServiceFactory.MODE_LOCAL); /* initialize crypto */ Crypto.init(password); /* clear the password */ Arrays.fill(password, ' '); /* load ACLs */ ACLStore.getInstance(); /* start the AlertEngine */ AlertEngine.getInstance().start(); /* start the application downtime service */ ApplicationDowntimeService.getInstance().start(); /* load connectors */ ConnectorConfigRegistry.init(); ConnectorRegistry.load(); /* start the application */ start(); } |
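Note: with no removed code, this row registers a JVM shutdown hook in jManage's main() so the embedded database is closed cleanly on exit. The registration pattern, runnable on its own (the logger and DBUtils calls are replaced with a println since those classes are not available here):

    public class ShutdownHookDemo {
        public static void main(String[] args) {
            // Same addShutdownHook pattern as the row above.
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    System.out.println("shutting down...");
                }
            });
            System.out.println("main done; the hook runs as the JVM exits");
        }
    }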
|
infoKnown = false; | void processData(){ final long maxCompDist = Long.parseLong(filenames[2])*1000; try{ this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); defineBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch (RuntimeException rtexec){ JOptionPane.showMessageDialog(this, "An error has occured. It is probably related to file format:\n"+rtexec.toString(), "Error", JOptionPane.ERROR_MESSAGE); } } |
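Note: an insertion-only row adding `infoKnown = false;` to processData(), apparently resetting the marker-info flag before the data are reprocessed so stale marker information from a previous file cannot leak into a new run.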
|
infoKnown = false; | public Object construct(){ dPrimeDisplay=null; if (!(filenames[1].equals(""))){ readMarkers(new File(filenames[1])); } theData.generateDPrimeTable(maxCompDist); theData.guessBlocks(0); drawPicture(theData); theData.finished = true; return ""; } |
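Note: the same one-line reset, this time recorded against the SwingWorker's construct() method, which is the body that actually performs processData()'s work; this is likely the same change seen at two context granularities.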
|
this.fileName = file.toString(); | public Script parse(File file) throws IOException, SAXException { ensureConfigured(); getXMLReader().parse(new InputSource(new FileReader(file))); return script; } |
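Note: Jelly's parse(File) now records `this.fileName = file.toString();` before parsing. Paired with the startElement row below, the parser captures the source file so that tag scripts can report where they were defined.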
|
tagScript.setFileName(fileName); tagScript.setElementName(qName); | public void startElement( String namespaceURI, String localName, String qName, Attributes list) throws SAXException { try { // add check to ensure namespace URI is "" for no namespace if ( namespaceURI == null ) { namespaceURI = ""; } // if this is a tag then create a script to run it // otherwise pass the text to the current body tagScript = createTag(namespaceURI, localName, list); if (tagScript == null) { tagScript = createStaticTag(namespaceURI, localName, qName, list); } tagScriptStack.add(tagScript); if (tagScript != null) { // set parent relationship... Tag tag = tagScript.getTag(); tag.setParent(parentTag); // set the line number details if ( locator != null ) { tagScript.setLocator(locator); } // pop another tag onto the stack if ( parentTag != null ) { tagStack.add( parentTag ); } parentTag = tag; if (textBuffer.length() > 0) { addTextScript(textBuffer.toString()); textBuffer.setLength(0); } script.addScript(tagScript); // start a new body scriptStack.push(script); script = new ScriptBlock(); tag.setBody(script); } else { // XXXX: might wanna handle empty elements later... textBuffer.append("<"); textBuffer.append(qName); int size = list.getLength(); for (int i = 0; i < size; i++) { textBuffer.append(" "); textBuffer.append(list.getQName(i)); textBuffer.append("="); textBuffer.append("\""); textBuffer.append(list.getValue(i)); textBuffer.append("\""); } textBuffer.append(">"); } } catch (SAXException e) { throw e; } catch (Exception e) { log.error( "Caught exception: " + e, e ); throw new SAXException( "Runtime Exception: " + e, e ); } } |
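Note: startElement propagates the captured file name and the element's qualified name into the TagScript, presumably so runtime errors can cite the file and element in addition to the SAX locator that is already set a few lines later in the same method.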
|
log.debug("Evaluating EL: " + expression); | log.debug("Evaluating EL: " + expression.getExpression()); | public Object evaluate(JellyContext context) { try { JexlContext jexlContext = new JellyJexlContext( context ); if (log.isDebugEnabled()) { log.debug("Evaluating EL: " + expression); } Object value = expression.evaluate(jexlContext); if (log.isDebugEnabled()) { log.debug("value of expression: " + value); } return value; } catch (Exception e) { log.warn("Caught exception evaluating: " + expression + ". Reason: " + e, e); return null; } } |
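Note: the debug statement now logs `expression.getExpression()`, i.e. the expression's source text, instead of the wrapper object itself, whose default toString() may not be meaningful.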
loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; | loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]))/LN10 + ((double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; | public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); 
denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); } |
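Note: the two forms of this statement are algebraically identical, since (a+b)/LN10 equals a/LN10 + b/LN10; the rewrite only splits the base-10 conversion between the phase-known terms and the double-heterozygote term. Either way, loglike is a base-10 log-likelihood, matching the LOD convention used elsewhere in computeDPrime.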
loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); | loglike1 = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; loglike0 = (known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2))/LN10; | public PairwiseLinkage computeDPrime(int pos1, int pos2){ compsDone++; int doublehet = 0; int[][] twoMarkerHaplos = new int[3][3]; for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } //check for non-polymorphic markers if (Chromosome.getUnfilteredMarker(pos1).getMAF() == 0 || Chromosome.getUnfilteredMarker(pos2).getMAF() == 0){ return null; } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[Chromosome.getUnfilteredMarker(pos1).getMajor()]=1; marker1num[Chromosome.getUnfilteredMarker(pos1).getMinor()]=2; marker2num[0]=0; marker2num[Chromosome.getUnfilteredMarker(pos2).getMajor()]=1; marker2num[Chromosome.getUnfilteredMarker(pos2).getMinor()]=2; byte a1,a2,b1,b2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables a1 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos1]; a2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; b1 = ((Chromosome) chromosomes.elementAt(++i)).genotypes[pos1]; b2 = ((Chromosome) chromosomes.elementAt(i)).genotypes[pos2]; if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 >= 5 && a2 >= 5) || (a1 >= 5 && !(a2 == b2)) || (a2 >= 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 >= 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 >= 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ return new PairwiseLinkage(1,0,0,0,0,new double[0]); } //compute D Prime for this pair of markers. 
//return is a tab delimited string of d', lod, r^2, CI(low), CI(high) this.realCompsDone++; int i,count; //int j,k,itmp; int low_i = 0; int high_i = 0; double loglike, oldloglike;// meand, mean2d, sd; double tmp;//g,h,m,tmp,r; double num, denom1, denom2, denom, dprime;//, real_dprime; double pA1, pB1, pA2, pB2, loglike1, loglike0, rsq; double tmpAA, tmpAB, tmpBA, tmpBB, dpr;// tmp2AA, tmp2AB, tmp2BA, tmp2BB; double total_prob, sum_prob; double lsurface[] = new double[101]; /* store arguments in externals and compute allele frequencies */ known[AA]=twoMarkerHaplos[1][1]; known[AB]=twoMarkerHaplos[1][2]; known[BA]=twoMarkerHaplos[2][1]; known[BB]=twoMarkerHaplos[2][2]; unknownDH=doublehet; total_chroms= (int)(known[AA]+known[AB]+known[BA]+known[BB]+(2*unknownDH)); pA1 = (known[AA]+known[AB]+unknownDH) / (double) total_chroms; pB1 = 1.0-pA1; pA2 = (known[AA]+known[BA]+unknownDH) / (double) total_chroms; pB2 = 1.0-pA2; const_prob = 0.1; /* set initial conditions */ if (const_prob < 0.00) { probHaps[AA]=pA1*pA2; probHaps[AB]=pA1*pB2; probHaps[BA]=pB1*pA2; probHaps[BB]=pB1*pB2; } else { probHaps[AA]=const_prob; probHaps[AB]=const_prob; probHaps[BA]=const_prob; probHaps[BB]=const_prob;; /* so that the first count step will produce an initial estimate without inferences (this should be closer and therefore speedier than assuming they are all at equal frequency) */ count_haps(0); estimate_p(); } /* now we have an initial reasonable guess at p we can start the EM - let the fun begin */ const_prob=0.0; count=1; loglike=-999999999.0; do { oldloglike=loglike; count_haps(count); loglike = (known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]))/LN10; if (Math.abs(loglike-oldloglike) < TOLERANCE) break; estimate_p(); count++; } while(count < 1000); /* in reality I've never seen it need more than 10 or so iterations to converge so this is really here just to keep it from running off into eternity */ loglike1 = known[AA]*Math.log(probHaps[AA]) + known[AB]*Math.log(probHaps[AB]) + known[BA]*Math.log(probHaps[BA]) + known[BB]*Math.log(probHaps[BB]) + (double)unknownDH*Math.log(probHaps[AA]*probHaps[BB] + probHaps[AB]*probHaps[BA]); loglike0 = known[AA]*Math.log(pA1*pA2) + known[AB]*Math.log(pA1*pB2) + known[BA]*Math.log(pB1*pA2) + known[BB]*Math.log(pB1*pB2) + (double)unknownDH*Math.log(2*pA1*pA2*pB1*pB2); num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; if (num < 0) { /* flip matrix so we get the positive D' */ /* flip AA with AB and BA with BB */ tmp=probHaps[AA]; probHaps[AA]=probHaps[AB]; probHaps[AB]=tmp; tmp=probHaps[BB]; probHaps[BB]=probHaps[BA]; probHaps[BA]=tmp; /* flip frequency of second allele */ //done in this slightly asinine way because of a compiler bug in the alpha version of java //which causes it to try to parallelize the swapping operations and mis-schedules them pA2 = pA2 + pB2; pB2 = pA2 - pB2; pA2 = pA2 - pB2; //pA2=pB2;pB2=temp; /* flip counts in the same fashion as p's */ tmp=numHaps[AA]; numHaps[AA]=numHaps[AB]; numHaps[AB]=tmp; tmp=numHaps[BB]; numHaps[BB]=numHaps[BA]; numHaps[BA]=tmp; /* num has now undergone a sign change */ num = probHaps[AA]*probHaps[BB] - probHaps[AB]*probHaps[BA]; /* flip known array for likelihood computation */ tmp=known[AA]; known[AA]=known[AB]; known[AB]=tmp; tmp=known[BB]; known[BB]=known[BA]; known[BA]=tmp; } denom1 = (probHaps[AA]+probHaps[BA])*(probHaps[BA]+probHaps[BB]); 
denom2 = (probHaps[AA]+probHaps[AB])*(probHaps[AB]+probHaps[BB]); if (denom1 < denom2) { denom = denom1; } else { denom = denom2; } dprime = num/denom; /* add computation of r^2 = (D^2)/p(1-p)q(1-q) */ rsq = num*num/(pA1*pB1*pA2*pB2); //real_dprime=dprime; for (i=0; i<=100; i++) { dpr = (double)i*0.01; tmpAA = dpr*denom + pA1*pA2; tmpAB = pA1-tmpAA; tmpBA = pA2-tmpAA; tmpBB = pB1-tmpBA; if (i==100) { /* one value will be 0 */ if (tmpAA < 1e-10) tmpAA=1e-10; if (tmpAB < 1e-10) tmpAB=1e-10; if (tmpBA < 1e-10) tmpBA=1e-10; if (tmpBB < 1e-10) tmpBB=1e-10; } lsurface[i] = (known[AA]*Math.log(tmpAA) + known[AB]*Math.log(tmpAB) + known[BA]*Math.log(tmpBA) + known[BB]*Math.log(tmpBB) + (double)unknownDH*Math.log(tmpAA*tmpBB + tmpAB*tmpBA))/LN10; } /* Confidence bounds #2 - used in Gabriel et al (2002) - translate into posterior dist of D' - assumes a flat prior dist. of D' - someday we may be able to make this even more clever by adjusting given the distribution of observed D' values for any given distance after some large scale studies are complete */ total_prob=sum_prob=0.0; for (i=0; i<=100; i++) { lsurface[i] -= loglike1; lsurface[i] = Math.pow(10.0,lsurface[i]); total_prob += lsurface[i]; } for (i=0; i<=100; i++) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { low_i = i-1; break; } } sum_prob=0.0; for (i=100; i>=0; i--) { sum_prob += lsurface[i]; if (sum_prob > 0.05*total_prob && sum_prob-lsurface[i] < 0.05*total_prob) { high_i = i+1; break; } } if (high_i > 100){ high_i = 100; } double[] freqarray = {probHaps[AA], probHaps[AB], probHaps[BB], probHaps[BA]}; return new PairwiseLinkage(roundDouble(dprime), roundDouble((loglike1-loglike0)), roundDouble(rsq), ((double)low_i/100.0), ((double)high_i/100.0), freqarray); } |
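Note: unlike the previous row, this one is a genuine fix. loglike1 and loglike0 were natural logs, but lsurface[] is built with /LN10 and the confidence-bound code computes Math.pow(10.0, lsurface[i] - loglike1); mixing bases there skews the posterior, and the returned LOD (loglike1 - loglike0) should be base 10 as well. A tiny runnable check of the identity being relied on:

    public class LogBaseCheck {
        static final double LN10 = Math.log(10.0);
        public static void main(String[] args) {
            double x = 0.37;
            // log10(x) = ln(x) / ln(10); dividing by LN10 puts loglike1 and
            // loglike0 on the same base-10 scale as lsurface[], so
            // Math.pow(10.0, lsurface[i] - loglike1) is a true base-10 ratio.
            System.out.println(Math.log(x) / LN10);
            System.out.println(Math.log10(x));  // same value, up to FP rounding
        }
    }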
public Object evaluate(Context context) { | public Object evaluate(JellyContext context) { | public Object evaluate(Context context) { return value; } |
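Note: a plain API migration; the expression now evaluates against JellyContext rather than the older Context type, consistent with the JellyJexlContext-based evaluate(JellyContext) context shown earlier in this section.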
if ( volumes != null ) { vol = (Volume) volumes.get( volName ); | if ( volumes == null ) { getDefaultVolume(); | public static Volume getVolume( String volName ) { Volume vol = null; if ( volumes != null ) { vol = (Volume) volumes.get( volName ); } return vol; } |
vol = (Volume) volumes.get( volName ); | public static Volume getVolume( String volName ) { Volume vol = null; if ( volumes != null ) { vol = (Volume) volumes.get( volName ); } return vol; } |
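Note: read together, this row and the previous one change getVolume from "look up only if the registry exists" to "initialize the registry on first use, then look up". The exact placement of the lookup line is not visible in the cells, so the following is a minimal sketch of the resulting shape, using a stand-in map and a hypothetical getDefaultVolume() named after the diff:

    import java.util.HashMap;
    import java.util.Map;

    public class LazyRegistryDemo {
        static Map<String, String> volumes;   // stand-in for the Volume registry

        static void getDefaultVolume() {      // hypothetical initializer
            volumes = new HashMap<String, String>();
            volumes.put("scratch", "scratchVolume");
        }

        static String getVolume(String volName) {
            if (volumes == null) {            // initialize on first use...
                getDefaultVolume();
            }
            return volumes.get(volName);      // ...then look up unconditionally
        }

        public static void main(String[] args) {
            System.out.println(getVolume("scratch"));  // scratchVolume, not null
        }
    }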
|
configureTag(tag); | public Tag getTag() throws Exception { Tag tag = (Tag) tagHolder.get(); if ( tag == null ) { tag = createTag(); if ( tag != null ) { configureTag(tag); tagHolder.set(tag); } } return tag; } |
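Note: getTag() lazily creates a Tag through a holder (tagHolder reads like a ThreadLocal); the inserted configureTag(tag) ensures a freshly created instance is configured before it is cached, so later lookups do not hand back a half-initialized tag.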
|
ProfileColumn pc = ProfileColumn.values()[column]; | int modelColumn = convertColumnIndexToModel(column); ProfileColumn pc = ProfileColumn.values()[modelColumn]; | public TableCellRenderer getCellRenderer(int row, int column) { ProfileColumn pc = ProfileColumn.values()[column]; switch(pc) { case DATABASE: case SCHEMA: case CATALOG: case TABLE: case COLUMN: return new SQLObjectRendererFactory(); case RUNDATE: return new DateRendererFactory(); case PERCENT_UNIQUE: case PERCENT_NULL: return new PercentRendererFactory(); case AVERAGE_LENGTH: return new DecimalRendererFactory(); case MIN_VALUE: case MAX_VALUE: case AVERAGE_VALUE: return new ValueRendererFactory(); default: return super.getCellRenderer(row, column); } } |
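Note: a classic Swing index-mapping bug. getCellRenderer receives a view column index, but ProfileColumn is ordered by the table model; after the user drags columns, the two diverge, so the fix routes through convertColumnIndexToModel first. A small runnable illustration:

    import javax.swing.JTable;

    public class ViewModelIndexDemo {
        public static void main(String[] args) {
            JTable table = new JTable(1, 3);
            table.moveColumn(0, 2);  // simulate the user dragging a column
            for (int view = 0; view < 3; view++) {
                // After the move, view indices 0..2 map to model columns 1, 2, 0;
                // a renderer keyed on the raw view index would pick the wrong
                // ProfileColumn, which is why the fix converts first.
                System.out.println(view + " -> model "
                        + table.convertColumnIndexToModel(view));
            }
        }
    }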
System.out.println(baseName); | void browse(int browseType){ String name; String markerInfoName = ""; HaploView h = (HaploView) this.getParent(); h.fc.setSelectedFile(new File("")); int returned = h.fc.showOpenDialog(this); if (returned != JFileChooser.APPROVE_OPTION) return; File file = h.fc.getSelectedFile(); if (browseType == GENO){ name = file.getName(); genoFileField.setText(file.getParent()+File.separator+name); if(infoFileField.getText().equals("")){ //baseName should be everything but the final ".XXX" extension StringTokenizer st = new StringTokenizer(name,"."); String baseName = st.nextToken(); int numPieces = st.countTokens()-1; for (int i = 0; i < numPieces; i++){ baseName = baseName.concat(".").concat(st.nextToken()); } System.out.println(baseName); //check for info file for original file sample.haps //either sample.haps.info or sample.info File maybeMarkers1 = new File(file.getParent(), name + MARKER_DATA_EXT); File maybeMarkers2 = new File(file.getParent(), baseName + MARKER_DATA_EXT); if (maybeMarkers1.exists()){ markerInfoName = maybeMarkers1.getName(); }else if (maybeMarkers2.exists()){ markerInfoName = maybeMarkers2.getName(); }else{ return; } infoFileField.setText(file.getParent()+File.separator+markerInfoName); } }else if (browseType==INFO){ markerInfoName = file.getName(); infoFileField.setText(file.getParent()+File.separator+markerInfoName); } } |
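Note: a removal-only row that deletes a leftover debug System.out.println(baseName) from the file-browse handler; nothing is added in its place.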
|
Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); | Script script = (Script) context.getVariable( "jelly.body" ); if ( script != null ) { script.run( context, output ); | public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } } |
tag.getBody().run(context, output); | Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } | public void run(Context context, XMLOutput output) throws Exception { // #### note this mechanism does not work properly for arbitrarily // #### nested dynamic tags. A better way is required. Tag tag = findAncestorWithClass(this, DynamicTag.class); if ( tag == null ) { // throw new JellyException( "Cannot invoke body, no dynamic tag is defined in this block" ); log.warn( "Cannot invoke body, no dynamic tag is defined in this block" ); } else { tag.getBody().run(context, output); } } |
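Note: these two rows show the same run() method reworked twice. One revision resolves the body through the context variable "jelly.body" and runs it only if present; the other keeps the findAncestorWithClass(DynamicTag) lookup but upgrades the silent log.warn into a thrown JellyException. The relative order of the two commits is not recoverable from the rows alone.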
recordDowntime(event.getApplicationConfig().getApplicationId(), downtimeHistory.getDowntimeBegin(), event.getTime()); |  | public void handleEvent(Event event) { ApplicationDowntimeHistory downtimeHistory = getDowntimeHistory(event.getApplicationConfig()); assert downtimeHistory != null; if(event instanceof ApplicationUpEvent){ // application must have gone down earlier downtimeHistory.applicationCameUp(event.getTime()); }else if(event instanceof ApplicationDownEvent){ downtimeHistory.applicationWentDown(event.getTime()); recordDowntime(event.getApplicationConfig().getApplicationId(), downtimeHistory.getDowntimeBegin(), event.getTime()); } }
|
else if(args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-logfile")){ | else if(args[i].equalsIgnoreCase("-log")){ | private void argHandler(String[] args){ argHandlerMessages = new Vector(); int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; tagging = Tagger.NONE; maxNumTags = Tagger.DEFAULT_MAXNUMTAGS; findTags = true; double cutHighCI = -1; double cutLowCI = -1; double mafThresh = -1; double recHighCI = -1; double informFrac = -1; double fourGameteCutoff = -1; double spineDP = -1; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-logfile")){ i++; if (i >= args.length || args[i].charAt(0) == '-'){ logName = "haploview.log"; i--; }else{ logName = args[i]; } } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(pedFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ die("PC LOADLETTER?! What the fuck does that mean?!"); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ die("-excludeMarkers requires a list of markers"); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { StringBuffer sb = new StringBuffer(); if (!quietMode) sb.append("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) sb.append(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) sb.append(token+" "); excludedMarkers.add(new Integer(token)); } } argHandlerMessages.add(sb.toString()); } catch(NumberFormatException nfe) { die("-excludeMarkers argument should be of the format: 1,3,5..8,12"); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapsFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(infoFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapmapFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpdata")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpdataFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap data file listed will be used"); } phasedhmpdataFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpsample")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpsampleFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap sample file listed will be used"); } phasedhmpsampleFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmplegend")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmplegendFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap legend file listed will be used"); } phasedhmplegendFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhapmapdl")){ phasedhapmapDownload = true; } else if (args[i].equalsIgnoreCase("-plink")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last PLINK file listed will be used"); } plinkFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-map")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last map file listed will be used"); } mapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ die("-track requires a filename"); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ die("Only one block output type argument is allowed."); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if (args[i].equalsIgnoreCase("-indcheck")){ individualCheck = true; } else if (args[i].equalsIgnoreCase("-mendel")){ mendel = true; } else if (args[i].equalsIgnoreCase("-malehets")){ malehets = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; maxDistance = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(batchFileName != null){ argHandlerMessages.add("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; maxMendel = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-randomcc")){ assocCC = true; randomizeAffection = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } else if(args[i].equalsIgnoreCase("GOLD")) { Options.setLDColorScheme(GOLD_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-blockCutHighCI")) { i++; cutHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockCutLowCI")) { i++; cutLowCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockMafThresh")) { i++; mafThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockRecHighCI")) { i++; recHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockInformFrac")) { i++; informFrac = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-block4GamCut")) { i++; fourGameteCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockSpineDP")) { i++; spineDP = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-permtests")) { i++; doPermutationTest = true; permutationCount = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-aggressiveTagging")) { tagging = Tagger.AGGRESSIVE_TRIPLE; } else if (args[i].equalsIgnoreCase("-pairwiseTagging")){ tagging = Tagger.PAIRWISE_ONLY; } else if (args[i].equalsIgnoreCase("-printalltags")){ Options.setPrintAllTags(true); } else if(args[i].equalsIgnoreCase("-maxNumTags")){ i++; maxNumTags = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,0,1); } else if (args[i].equalsIgnoreCase("-dontaddtags")){ findTags = false; } else if(args[i].equalsIgnoreCase("-tagLODCutoff")) { i++; Options.setTaggerLODCutoff(getDoubleArg(args,i,0,100000)); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die(args[i-1] + " requires a 
list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die("-excludeTags requires a list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-captureAlleles")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { captureAllelesFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-designScores")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { designScoresFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-mintagdistance")){ i++; minTagDistance = args[i]; } else if(args[i].equalsIgnoreCase("-chromosome") || args[i].equalsIgnoreCase("-chr")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { chromosomeArg =args[i]; }else { die(args[i-1] + " requires a chromosome name"); } if(!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ try{ if (Integer.parseInt(chromosomeArg) > 22){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } }catch(NumberFormatException nfe){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } } } else if(args[i].equalsIgnoreCase("-population")){ i++; if(!(i>=args.length) && !(args[i].charAt(0)== '-')) { populationArg = args[i]; }else { die(args[i-1] + "requires a population name"); } } else if(args[i].equalsIgnoreCase("-startpos")){ i++; startPos = args[i]; } else if(args[i].equalsIgnoreCase("-endPos")){ i++; endPos = args[i]; } else if(args[i].equalsIgnoreCase("-release")){ i++; release = args[i]; } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else if(args[i].equalsIgnoreCase("-gzip")){ Options.setGzip(true); } else { die("invalid parameter specified: " + args[i]); } } if (logName != null){ logString = "*****************************************************\n" + TITLE_STRING + "\tJava Version: " + JAVA_VERSION + "\n*****************************************************\n\n\n" + "Arguments:\t"; for (int i = 0; i < args.length; i++){ logString = logString + args[i] + "\t"; } logString = logString + "\n\n"; } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(phasedhmpdataFileName != null) { countOptions++; if(phasedhmpsampleFileName == null){ die("You must specify a sample file for phased hapmap input."); }else if(phasedhmplegendFileName == null){ die("You must specify a legend file for phased hapmap input."); } } if(phasedhapmapDownload) { countOptions++; } if(plinkFileName != null){ countOptions++; if(mapFileName == null){ die("You must specify a map file for plink format input."); } } if(batchFileName != null) { 
countOptions++; } if(countOptions > 1) { die("Only one genotype input file may be specified on the command line."); } else if(countOptions == 0 && nogui) { die("You must specify a genotype input file."); } //mess with vars, set defaults, etc if(skipCheck) { argHandlerMessages.add("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = MAXDIST_DEFAULT; }else{ argHandlerMessages.add("Max LD comparison distance = " +maxDistance + "kb"); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold(hapThresh); argHandlerMessages.add("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; argHandlerMessages.add("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); argHandlerMessages.add("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; argHandlerMessages.add("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; argHandlerMessages.add("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); argHandlerMessages.add("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); argHandlerMessages.add("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(cutHighCI != -1) { FindBlocks.cutHighCI = cutHighCI; } if(cutLowCI != -1) { FindBlocks.cutLowCI = cutLowCI; } if(mafThresh != -1) { FindBlocks.mafThresh = mafThresh; } if(recHighCI != -1) { FindBlocks.recHighCI = recHighCI; } if(informFrac != -1) { FindBlocks.informFrac = informFrac; } if(fourGameteCutoff != -1) { FindBlocks.fourGameteCutoff = fourGameteCutoff; } if(spineDP != -1) { FindBlocks.spineDP = spineDP; } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); }else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if (Options.getAssocTest() != ASSOC_NONE && infoFileName == null && hapmapFileName == null) { die("A marker info file must be specified when performing association tests."); } if(doPermutationTest) { if(!assocCC && !assocTDT) { die("An association test type must be specified for permutation tests to be performed."); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { die("An association test type must be specified when using a custom association test file."); } if(infoFileName == null) { die("A marker info file must be specified when using a custom association test file."); } } if(tagging != Tagger.NONE) { if(infoFileName == null && hapmapFileName == null && batchFileName == null && phasedhmpdataFileName == null && !phasedhapmapDownload) { die("A marker info file must be specified when tagging."); } if(forceExcludeTags == null) { forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { die("-excludeTags and -excludeTagsFile cannot both be used"); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { die("An error occurred while reading the file specified by -excludeTagsFile."); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != 
null) { die("-includeTags and -includeTagsFile cannot both be used"); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -includeTagsFile."); } } if (captureAllelesFileName != null) { File captureFile = new File(captureAllelesFileName); captureAlleleTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(captureFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ line = line.trim(); captureAlleleTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (designScoresFileName != null) { File designFile = new File(designScoresFileName); designScores = new Hashtable(1,1); try { BufferedReader br = new BufferedReader(new FileReader(designFile)); String line; int lines = 0; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ StringTokenizer st = new StringTokenizer(line); int length = st.countTokens(); if (length != 2){ die("Invalid formatting on line " + lines); } String marker = st.nextToken(); Double score = new Double(st.nextToken()); designScores.put(marker,score); } lines++; } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (minTagDistance != null) { try{ if (Integer.parseInt(minTagDistance) < 0){ die("minimum tag distance cannot be negative"); } }catch(NumberFormatException nfe){ die("minimum tag distance must be a positive integer"); } Options.setTaggerMinDistance(Integer.parseInt(minTagDistance)); } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } die("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { die("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without a tagging option"); } if(chromosomeArg != null && hapmapFileName != null) { argHandlerMessages.add("-chromosome flag ignored when loading hapmap file"); chromosomeArg = null; } if(chromosomeArg != null) { Chromosome.setDataChrom("chr" + chromosomeArg); } if (phasedhapmapDownload){ if (chromosomeArg == null){ die("-phasedhapmapdl requires a chromosome specification"); }else if (!(populationArg.equalsIgnoreCase("CEU") || populationArg.equalsIgnoreCase("YRI") || populationArg.equalsIgnoreCase("CHB+JPT"))){ die("-phasedhapmapdl requires a population specification of CEU, YRI, or CHB+JPT"); } if (Integer.parseInt(chromosomeArg) < 1 && Integer.parseInt(chromosomeArg) > 22){ if (!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ die("-chromosome must be betweeen 1 and 22, X, or Y"); } } try{ if (Integer.parseInt(startPos) > Integer.parseInt(endPos)){ 
die("-endpos must be greater then -startpos"); } }catch(NumberFormatException nfe){ die("-startpos and -endpos must be integer values"); } if (release == null){ release = "21"; } if (!(release.equals("21")) && !(release.startsWith("16"))){ die("release must be either 16a or 21"); } } } |
}else{ chromosomeArg = ""; | private void argHandler(String[] args){ argHandlerMessages = new Vector(); int maxDistance = -1; //this means that user didn't specify any output type if it doesn't get changed below blockOutputType = -1; double hapThresh = -1; double minimumMAF=-1; double spacingThresh = -1; double minimumGenoPercent = -1; double hwCutoff = -1; double missingCutoff = -1; int maxMendel = -1; boolean assocTDT = false; boolean assocCC = false; permutationCount = 0; tagging = Tagger.NONE; maxNumTags = Tagger.DEFAULT_MAXNUMTAGS; findTags = true; double cutHighCI = -1; double cutLowCI = -1; double mafThresh = -1; double recHighCI = -1; double informFrac = -1; double fourGameteCutoff = -1; double spineDP = -1; for(int i =0; i < args.length; i++) { if(args[i].equalsIgnoreCase("-help") || args[i].equalsIgnoreCase("-h")) { System.out.println(HELP_OUTPUT); System.exit(0); } else if(args[i].equalsIgnoreCase("-n") || args[i].equalsIgnoreCase("-nogui")) { nogui = true; } else if(args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-logfile")){ i++; if (i >= args.length || args[i].charAt(0) == '-'){ logName = "haploview.log"; i--; }else{ logName = args[i]; } } else if(args[i].equalsIgnoreCase("-p") || args[i].equalsIgnoreCase("-pedfile")) { i++; if( i>=args.length || (args[i].charAt(0) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(pedFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last pedfile listed will be used"); } pedFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-pcloadletter")){ die("PC LOADLETTER?! What the fuck does that mean?!"); } else if (args[i].equalsIgnoreCase("-skipcheck") || args[i].equalsIgnoreCase("--skipcheck")){ skipCheck = true; } else if (args[i].equalsIgnoreCase("-excludeMarkers")){ i++; if(i>=args.length || (args[i].charAt(0) == '-')){ die("-excludeMarkers requires a list of markers"); } else { StringTokenizer str = new StringTokenizer(args[i],","); try { StringBuffer sb = new StringBuffer(); if (!quietMode) sb.append("Excluding markers: "); while(str.hasMoreTokens()) { String token = str.nextToken(); if(token.indexOf("..") != -1) { int lastIndex = token.indexOf(".."); int rangeStart = Integer.parseInt(token.substring(0,lastIndex)); int rangeEnd = Integer.parseInt(token.substring(lastIndex+2,token.length())); for(int j=rangeStart;j<=rangeEnd;j++) { if (!quietMode) sb.append(j+" "); excludedMarkers.add(new Integer(j)); } } else { if (!quietMode) sb.append(token+" "); excludedMarkers.add(new Integer(token)); } } argHandlerMessages.add(sb.toString()); } catch(NumberFormatException nfe) { die("-excludeMarkers argument should be of the format: 1,3,5..8,12"); } } } else if(args[i].equalsIgnoreCase("-ha") || args[i].equalsIgnoreCase("-l") || args[i].equalsIgnoreCase("-haps")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapsFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last haps file listed will be used"); } hapsFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-i") || args[i].equalsIgnoreCase("-info")) { i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(infoFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last info file listed will be used"); } infoFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-a") || args[i].equalsIgnoreCase("-hapmap")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(hapmapFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last hapmap file listed will be used"); } hapmapFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpdata")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpdataFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap data file listed will be used"); } phasedhmpdataFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmpsample")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmpsampleFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap sample file listed will be used"); } phasedhmpsampleFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhmplegend")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(phasedhmplegendFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last phased hapmap legend file listed will be used"); } phasedhmplegendFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-phasedhapmapdl")){ phasedhapmapDownload = true; } else if (args[i].equalsIgnoreCase("-plink")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. only last PLINK file listed will be used"); } plinkFileName = args[i]; } } else if (args[i].equalsIgnoreCase("-map")){ i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(plinkFileName != null){ argHandlerMessages.add("multiple "+args[i-1] + " arguments found. 
only last map file listed will be used"); } mapFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-k") || args[i].equalsIgnoreCase("-blocks")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ blockFileName = args[i]; blockOutputType = BLOX_CUSTOM; }else{ die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-png")){ outputPNG = true; } else if (args[i].equalsIgnoreCase("-smallpng") || args[i].equalsIgnoreCase("-compressedPNG")){ outputCompressedPNG = true; } else if (args[i].equalsIgnoreCase("-track")){ i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ trackFileName = args[i]; }else{ die("-track requires a filename"); } } else if(args[i].equalsIgnoreCase("-o") || args[i].equalsIgnoreCase("-output") || args[i].equalsIgnoreCase("-blockoutput")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(blockOutputType != -1){ die("Only one block output type argument is allowed."); } if(args[i].equalsIgnoreCase("SFS") || args[i].equalsIgnoreCase("GAB")){ blockOutputType = BLOX_GABRIEL; } else if(args[i].equalsIgnoreCase("GAM")){ blockOutputType = BLOX_4GAM; } else if(args[i].equalsIgnoreCase("MJD") || args[i].equalsIgnoreCase("SPI")){ blockOutputType = BLOX_SPINE; } else if(args[i].equalsIgnoreCase("ALL")) { blockOutputType = BLOX_ALL; } } else { //defaults to SFS output blockOutputType = BLOX_GABRIEL; i--; } } else if(args[i].equalsIgnoreCase("-d") || args[i].equalsIgnoreCase("--dprime") || args[i].equalsIgnoreCase("-dprime")) { outputDprime = true; } else if (args[i].equalsIgnoreCase("-c") || args[i].equalsIgnoreCase("-check")){ outputCheck = true; } else if (args[i].equalsIgnoreCase("-indcheck")){ individualCheck = true; } else if (args[i].equalsIgnoreCase("-mendel")){ mendel = true; } else if (args[i].equalsIgnoreCase("-malehets")){ malehets = true; } else if(args[i].equalsIgnoreCase("-m") || args[i].equalsIgnoreCase("-maxdistance")) { i++; maxDistance = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-b") || args[i].equalsIgnoreCase("-batch")) { //batch mode i++; if(i>=args.length || ((args[i].charAt(0)) == '-')){ die(args[i-1] + " requires a filename"); } else{ if(batchFileName != null){ argHandlerMessages.add("multiple " + args[i-1] + " arguments found. 
only last batch file listed will be used"); } batchFileName = args[i]; } } else if(args[i].equalsIgnoreCase("-hapthresh")) { i++; hapThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-spacing")) { i++; spacingThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-minMAF")) { i++; minimumMAF = getDoubleArg(args,i,0,0.5); } else if(args[i].equalsIgnoreCase("-minGeno") || args[i].equalsIgnoreCase("-minGenoPercent")) { i++; minimumGenoPercent = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-hwcutoff")) { i++; hwCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-maxMendel") ) { i++; maxMendel = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-missingcutoff")) { i++; missingCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-assoctdt")) { assocTDT = true; } else if(args[i].equalsIgnoreCase("-assoccc")) { assocCC = true; } else if(args[i].equalsIgnoreCase("-randomcc")){ assocCC = true; randomizeAffection = true; } else if(args[i].equalsIgnoreCase("-ldcolorscheme")) { i++; if(!(i>=args.length) && !((args[i].charAt(0)) == '-')){ if(args[i].equalsIgnoreCase("default")){ Options.setLDColorScheme(STD_SCHEME); } else if(args[i].equalsIgnoreCase("RSQ")){ Options.setLDColorScheme(RSQ_SCHEME); } else if(args[i].equalsIgnoreCase("DPALT") ){ Options.setLDColorScheme(WMF_SCHEME); } else if(args[i].equalsIgnoreCase("GAB")) { Options.setLDColorScheme(GAB_SCHEME); } else if(args[i].equalsIgnoreCase("GAM")) { Options.setLDColorScheme(GAM_SCHEME); } else if(args[i].equalsIgnoreCase("GOLD")) { Options.setLDColorScheme(GOLD_SCHEME); } } else { //defaults to STD color scheme Options.setLDColorScheme(STD_SCHEME); i--; } } else if(args[i].equalsIgnoreCase("-blockCutHighCI")) { i++; cutHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockCutLowCI")) { i++; cutLowCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockMafThresh")) { i++; mafThresh = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockRecHighCI")) { i++; recHighCI = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockInformFrac")) { i++; informFrac = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-block4GamCut")) { i++; fourGameteCutoff = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-blockSpineDP")) { i++; spineDP = getDoubleArg(args,i,0,1); } else if(args[i].equalsIgnoreCase("-permtests")) { i++; doPermutationTest = true; permutationCount = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-customassoc")) { i++; if (!(i>=args.length) && !((args[i].charAt(0)) == '-')){ customAssocTestsFileName = args[i]; }else{ die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-aggressiveTagging")) { tagging = Tagger.AGGRESSIVE_TRIPLE; } else if (args[i].equalsIgnoreCase("-pairwiseTagging")){ tagging = Tagger.PAIRWISE_ONLY; } else if (args[i].equalsIgnoreCase("-printalltags")){ Options.setPrintAllTags(true); } else if(args[i].equalsIgnoreCase("-maxNumTags")){ i++; maxNumTags = getIntegerArg(args,i); } else if(args[i].equalsIgnoreCase("-tagrSqCutoff")) { i++; tagRSquaredCutOff = getDoubleArg(args,i,0,1); } else if (args[i].equalsIgnoreCase("-dontaddtags")){ findTags = false; } else if(args[i].equalsIgnoreCase("-tagLODCutoff")) { i++; Options.setTaggerLODCutoff(getDoubleArg(args,i,0,100000)); } else if(args[i].equalsIgnoreCase("-includeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die(args[i-1] + " requires a 
list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceIncludeTags = new Vector(); while(str.hasMoreTokens()) { forceIncludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-includeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceIncludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if(args[i].equalsIgnoreCase("-excludeTags")) { i++; if(i>=args.length || args[i].charAt(0) == '-') { die("-excludeTags requires a list of marker names."); } StringTokenizer str = new StringTokenizer(args[i],","); forceExcludeTags = new Vector(); while(str.hasMoreTokens()) { forceExcludeTags.add(str.nextToken()); } } else if (args[i].equalsIgnoreCase("-excludeTagsFile")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { forceExcludeFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-captureAlleles")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { captureAllelesFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-designScores")){ i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { designScoresFileName =args[i]; }else { die(args[i-1] + " requires a filename"); } } else if (args[i].equalsIgnoreCase("-mintagdistance")){ i++; minTagDistance = args[i]; } else if(args[i].equalsIgnoreCase("-chromosome") || args[i].equalsIgnoreCase("-chr")) { i++; if(!(i>=args.length) && !(args[i].charAt(0) == '-')) { chromosomeArg =args[i]; }else { die(args[i-1] + " requires a chromosome name"); } if(!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ try{ if (Integer.parseInt(chromosomeArg) > 22){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } }catch(NumberFormatException nfe){ die("-chromosome requires a chromsome name of 1-22, X, or Y"); } } } else if(args[i].equalsIgnoreCase("-population")){ i++; if(!(i>=args.length) && !(args[i].charAt(0)== '-')) { populationArg = args[i]; }else { die(args[i-1] + "requires a population name"); } } else if(args[i].equalsIgnoreCase("-startpos")){ i++; startPos = args[i]; } else if(args[i].equalsIgnoreCase("-endPos")){ i++; endPos = args[i]; } else if(args[i].equalsIgnoreCase("-release")){ i++; release = args[i]; } else if(args[i].equalsIgnoreCase("-q") || args[i].equalsIgnoreCase("-quiet")) { quietMode = true; } else if(args[i].equalsIgnoreCase("-gzip")){ Options.setGzip(true); } else { die("invalid parameter specified: " + args[i]); } } if (logName != null){ logString = "*****************************************************\n" + TITLE_STRING + "\tJava Version: " + JAVA_VERSION + "\n*****************************************************\n\n\n" + "Arguments:\t"; for (int i = 0; i < args.length; i++){ logString = logString + args[i] + "\t"; } logString = logString + "\n\n"; } int countOptions = 0; if(pedFileName != null) { countOptions++; } if(hapsFileName != null) { countOptions++; } if(hapmapFileName != null) { countOptions++; } if(phasedhmpdataFileName != null) { countOptions++; if(phasedhmpsampleFileName == null){ die("You must specify a sample file for phased hapmap input."); }else if(phasedhmplegendFileName == null){ die("You must specify a legend file for phased hapmap input."); } } if(phasedhapmapDownload) { countOptions++; } if(plinkFileName != null){ countOptions++; if(mapFileName == null){ die("You must specify a map file for plink format input."); } } if(batchFileName != null) { 
countOptions++; } if(countOptions > 1) { die("Only one genotype input file may be specified on the command line."); } else if(countOptions == 0 && nogui) { die("You must specify a genotype input file."); } //mess with vars, set defaults, etc if(skipCheck) { argHandlerMessages.add("Skipping genotype file check"); } if(maxDistance == -1){ maxDistance = MAXDIST_DEFAULT; }else{ argHandlerMessages.add("Max LD comparison distance = " +maxDistance + "kb"); } Options.setMaxDistance(maxDistance); if(hapThresh != -1) { Options.setHaplotypeDisplayThreshold(hapThresh); argHandlerMessages.add("Haplotype display threshold = " + hapThresh); } if(minimumMAF != -1) { CheckData.mafCut = minimumMAF; argHandlerMessages.add("Minimum MAF = " + minimumMAF); } if(minimumGenoPercent != -1) { CheckData.failedGenoCut = (int)(minimumGenoPercent*100); argHandlerMessages.add("Minimum SNP genotype % = " + minimumGenoPercent); } if(hwCutoff != -1) { CheckData.hwCut = hwCutoff; argHandlerMessages.add("Hardy Weinberg equilibrium p-value cutoff = " + hwCutoff); } if(maxMendel != -1) { CheckData.numMendErrCut = maxMendel; argHandlerMessages.add("Maximum number of Mendel errors = "+maxMendel); } if(spacingThresh != -1) { Options.setSpacingThreshold(spacingThresh); argHandlerMessages.add("LD display spacing value = "+spacingThresh); } if(missingCutoff != -1) { Options.setMissingThreshold(missingCutoff); argHandlerMessages.add("Maximum amount of missing data allowed per individual = "+missingCutoff); } if(cutHighCI != -1) { FindBlocks.cutHighCI = cutHighCI; } if(cutLowCI != -1) { FindBlocks.cutLowCI = cutLowCI; } if(mafThresh != -1) { FindBlocks.mafThresh = mafThresh; } if(recHighCI != -1) { FindBlocks.recHighCI = recHighCI; } if(informFrac != -1) { FindBlocks.informFrac = informFrac; } if(fourGameteCutoff != -1) { FindBlocks.fourGameteCutoff = fourGameteCutoff; } if(spineDP != -1) { FindBlocks.spineDP = spineDP; } if(assocTDT) { Options.setAssocTest(ASSOC_TRIO); }else if(assocCC) { Options.setAssocTest(ASSOC_CC); } if (Options.getAssocTest() != ASSOC_NONE && infoFileName == null && hapmapFileName == null) { die("A marker info file must be specified when performing association tests."); } if(doPermutationTest) { if(!assocCC && !assocTDT) { die("An association test type must be specified for permutation tests to be performed."); } } if(customAssocTestsFileName != null) { if(!assocCC && !assocTDT) { die("An association test type must be specified when using a custom association test file."); } if(infoFileName == null) { die("A marker info file must be specified when using a custom association test file."); } } if(tagging != Tagger.NONE) { if(infoFileName == null && hapmapFileName == null && batchFileName == null && phasedhmpdataFileName == null && !phasedhapmapDownload) { die("A marker info file must be specified when tagging."); } if(forceExcludeTags == null) { forceExcludeTags = new Vector(); } else if (forceExcludeFileName != null) { die("-excludeTags and -excludeTagsFile cannot both be used"); } if(forceExcludeFileName != null) { File excludeFile = new File(forceExcludeFileName); forceExcludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(excludeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceExcludeTags.add(line); } } }catch(IOException ioe) { die("An error occurred while reading the file specified by -excludeTagsFile."); } } if(forceIncludeTags == null ) { forceIncludeTags = new Vector(); } else if (forceIncludeFileName != 
null) { die("-includeTags and -includeTagsFile cannot both be used"); } if(forceIncludeFileName != null) { File includeFile = new File(forceIncludeFileName); forceIncludeTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(includeFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ forceIncludeTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -includeTagsFile."); } } if (captureAllelesFileName != null) { File captureFile = new File(captureAllelesFileName); captureAlleleTags = new Vector(); try { BufferedReader br = new BufferedReader(new FileReader(captureFile)); String line; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ line = line.trim(); captureAlleleTags.add(line); } } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (designScoresFileName != null) { File designFile = new File(designScoresFileName); designScores = new Hashtable(1,1); try { BufferedReader br = new BufferedReader(new FileReader(designFile)); String line; int lines = 0; while((line = br.readLine()) != null) { if(line.length() > 0 && line.charAt(0) != '#'){ StringTokenizer st = new StringTokenizer(line); int length = st.countTokens(); if (length != 2){ die("Invalid formatting on line " + lines); } String marker = st.nextToken(); Double score = new Double(st.nextToken()); designScores.put(marker,score); } lines++; } }catch(IOException ioe) { die("An error occured while reading the file specified by -captureAlleles."); } } if (minTagDistance != null) { try{ if (Integer.parseInt(minTagDistance) < 0){ die("minimum tag distance cannot be negative"); } }catch(NumberFormatException nfe){ die("minimum tag distance must be a positive integer"); } Options.setTaggerMinDistance(Integer.parseInt(minTagDistance)); } //check that there isn't any overlap between include/exclude lists Vector tempInclude = (Vector) forceIncludeTags.clone(); tempInclude.retainAll(forceExcludeTags); if(tempInclude.size() > 0) { StringBuffer sb = new StringBuffer(); for (int i = 0; i < tempInclude.size(); i++) { String s = (String) tempInclude.elementAt(i); sb.append(s).append(","); } die("The following markers appear in both the include and exclude lists: " + sb.toString()); } if(tagRSquaredCutOff != -1) { Options.setTaggerRsqCutoff(tagRSquaredCutOff); } } else if(forceExcludeTags != null || forceIncludeTags != null || tagRSquaredCutOff != -1) { die("-tagrSqCutoff, -excludeTags, -excludeTagsFile, -includeTags and -includeTagsFile cannot be used without a tagging option"); } if(chromosomeArg != null && hapmapFileName != null) { argHandlerMessages.add("-chromosome flag ignored when loading hapmap file"); chromosomeArg = null; } if(chromosomeArg != null) { Chromosome.setDataChrom("chr" + chromosomeArg); } if (phasedhapmapDownload){ if (chromosomeArg == null){ die("-phasedhapmapdl requires a chromosome specification"); }else if (!(populationArg.equalsIgnoreCase("CEU") || populationArg.equalsIgnoreCase("YRI") || populationArg.equalsIgnoreCase("CHB+JPT"))){ die("-phasedhapmapdl requires a population specification of CEU, YRI, or CHB+JPT"); } if (Integer.parseInt(chromosomeArg) < 1 && Integer.parseInt(chromosomeArg) > 22){ if (!(chromosomeArg.equalsIgnoreCase("X")) && !(chromosomeArg.equalsIgnoreCase("Y"))){ die("-chromosome must be betweeen 1 and 22, X, or Y"); } } try{ if (Integer.parseInt(startPos) > Integer.parseInt(endPos)){ 
die("-endpos must be greater then -startpos"); } }catch(NumberFormatException nfe){ die("-startpos and -endpos must be integer values"); } if (release == null){ release = "21"; } if (!(release.equals("21")) && !(release.startsWith("16"))){ die("release must be either 16a or 21"); } } } |
|
phasedHapMapInfo = new String[]{phasedhmpdataFileName, phasedhmpsampleFileName, phasedhmplegendFileName, "", ""}; | phasedHapMapInfo = new String[]{phasedhmpdataFileName, phasedhmpsampleFileName, phasedhmplegendFileName, chromosomeArg, ""}; | private void processTextOnly(){ String fileName; int fileType; if(hapsFileName != null) { fileName = hapsFileName; fileType = HAPS_FILE; } else if (pedFileName != null){ fileName = pedFileName; fileType = PED_FILE; } else if (phasedhmpdataFileName != null){ fileName = phasedhmpdataFileName; fileType = PHASED_FILE; phasedHapMapInfo = new String[]{phasedhmpdataFileName, phasedhmpsampleFileName, phasedhmplegendFileName, "", ""}; } else if (phasedhapmapDownload){ fileName = "Chromosome" + chromosomeArg + populationArg; fileType = PHASEDHMPDL_FILE; phasedHapMapInfo = new String[]{fileName, populationArg, startPos, endPos, chromosomeArg, release}; }else{ fileName = hapmapFileName; fileType = HMP_FILE; } processFile(fileName,fileType,infoFileName); } |
getBody().run(context, output); | invokeBody(output); | public void doTag(XMLOutput output) throws Exception { // run the body first to configure any nested DataType instances getBody().run(context, output); AntTagSupport parentTag = (AntTagSupport) findAncestorWithClass( AntTagSupport.class); if ( parentTag == null ) { // ignore, as all it can be is a top-level datatype with // an id which has -already- added it to the project thanks // to the setAttribute() call. return; } Object targetObj = parentTag.getObject(); DataType dataType = getDataType(); if ( targetObj == null ) { // ignore, as all it can be is a top-level datatype with // an id which has -already- added it to the project thanks // to the setAttribute() call. return; } if( parentTag instanceof DynaBeanTagSupport ) { DynaBean dynaBean = ((DynaBeanTagSupport)parent).getDynaBean(); DynaClass dynaClass = dynaBean.getDynaClass(); DynaProperty dynaProperty = dynaClass.getDynaProperty(name); if ( dynaProperty != null ) { // lets set the bean property try { dynaBean.set( name, dataType ); return; } catch (Exception e) { // ignore, maybe something else will work. } } } if ( targetObj instanceof Path && dataType instanceof Path ) { ((Path)targetObj).append( (Path)dataType ); return; } IntrospectionHelper ih = IntrospectionHelper.getHelper( targetObj.getClass() ); try { ih.storeElement( getAntProject(), targetObj, dataType, getName() ); } catch (Exception e) { String dataTypeName = dataType.getClass().getName(); String baseName = dataTypeName.substring( dataTypeName.lastIndexOf( "." ) + 1 ); String methName = "add" + baseName; Method m = MethodUtils.getAccessibleMethod( targetObj.getClass(), methName, dataType.getClass() ); if ( m == null ) { String lname = baseName.toLowerCase(); methName = "add" + lname.substring( 0, 1 ).toUpperCase() + lname.substring( 1 ); m = MethodUtils.getAccessibleMethod( targetObj.getClass(), methName, dataType.getClass() ); } if ( m != null ) { try { m.invoke( targetObj, new Object[] { dataType } ); return; } catch (Exception i) { i.printStackTrace(); } } } } |
roles.add(new Role(userForm.getRole())); | String[] rolesString = userForm.getRole(); for(int ctr=0; ctr < rolesString.length; ctr++){ roles.add(new Role(rolesString[ctr])); } | private User buildUser(ActionForm form){ UserForm userForm = (UserForm)form; User user = UserManager.getInstance().getUser(userForm.getUsername()); assert user != null; List<Role> roles = new ArrayList<Role>(1); roles.add(new Role(userForm.getRole())); user.setRoles(roles); if(!userForm.getPassword().equals(UserForm.FORM_PASSWORD)){ String hashedPassword = Crypto.hash(userForm.getPassword()); user.setPassword(hashedPassword); } user.setStatus(userForm.getStatus()); return user; } |
chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadU, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadT, dad.getAffectedStatus(), currentInd.getAffectedStatus())); | chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadT, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadU, dad.getAffectedStatus(), currentInd.getAffectedStatus())); | public Vector linkageToChrom(File infile, int type) throws IllegalArgumentException, HaploViewException, PedFileException, IOException{ Vector pedFileStrings = new Vector(); Vector hapsDataStrings = new Vector(); BufferedReader reader = new BufferedReader(new FileReader(infile)); String line; while((line = reader.readLine())!=null){ if (line.length() == 0){ //skip blank lines continue; } if (line.startsWith("#@")){ hapsDataStrings.add(line.substring(2)); continue; } if (line.startsWith("#")){ //skip comments continue; } pedFileStrings.add(line); } pedFile = new PedFile(); if (type == PED_FILE){ pedFile.parseLinkage(pedFileStrings); }else{ pedFile.parseHapMap(pedFileStrings, hapsDataStrings); } Vector result = pedFile.check(); Vector indList = pedFile.getUnrelatedIndividuals(); Vector indsInTrio = new Vector(); int numMarkers = 0; numSingletons = 0; numTrios = 0; numPeds = pedFile.getNumFamilies(); Individual currentInd; Family currentFamily; Vector chrom = new Vector(); //first time through we deal with trios. for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.get(x); boolean haploid = ((currentInd.getGender() == 1) && Chromosome.getDataChrom().equalsIgnoreCase("chrx")); currentFamily = pedFile.getFamily(currentInd.getFamilyID()); if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //if indiv has both parents Individual mom = currentFamily.getMember(currentInd.getMomID()); Individual dad = currentFamily.getMember(currentInd.getDadID()); if (indList.contains(mom) && indList.contains(dad)){ numMarkers = currentInd.getNumMarkers(); byte[] dadT = new byte[numMarkers]; byte[] dadU = new byte[numMarkers]; byte[] momT = new byte[numMarkers]; byte[] momU = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte kid1, kid2; if (currentInd.getZeroed(i)){ kid1 = 0; kid2 = 0; }else{ kid1 = currentInd.getAllele(i,0); kid2 = currentInd.getAllele(i,1); } byte mom1,mom2; if (currentFamily.getMember(currentInd.getMomID()).getZeroed(i)){ mom1 = 0; mom2 = 0; }else{ mom1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(i,0); mom2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(i,1); } byte dad1,dad2; if (currentFamily.getMember(currentInd.getDadID()).getZeroed(i)){ dad1 = 0; dad2 = 0; }else{ dad1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(i,0); dad2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(i,1); } if(haploid) { if(kid1 == 0) { //kid missing dadU[i] = dad1; if (mom1 == mom2) { momT[i] = mom1; momU[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momT[i] = (byte)(4+mom1); momU[i] = (byte)(4+mom2); } } else { dadU[i] = dad1; if (mom1 == 0) { momT[i] = kid1; momU[i] = 0; } else if (mom1 == kid1) { momT[i] = mom1; momU[i] = mom2; } else { momT[i] = mom2; momU[i] = mom1; } } }else { if (kid1 == 0 || kid2 == 0) { //kid missing if (dad1 == dad2) { dadT[i] = dad1; dadU[i] = dad1; } else if (dad1 != 0 
&& dad2 != 0) { dadT[i] = (byte)(4+dad1); dadU[i] = (byte)(4+dad2); } if (mom1 == mom2) { momT[i] = mom1; momU[i] = mom1; } else if (mom1 != 0 && mom2 != 0){ momT[i] = (byte)(4+mom1); momU[i] = (byte)(4+mom2); } } else if (kid1 == kid2) { //kid homozygous if (dad1 == 0) { dadT[i] = kid1; dadU[i] = 0; } else if (dad1 == kid1) { dadT[i] = dad1; dadU[i] = dad2; } else { dadT[i] = dad2; dadU[i] = dad1; } if (mom1 == 0) { momT[i] = kid1; momU[i] = 0; } else if (mom1 == kid1) { momT[i] = mom1; momU[i] = mom2; } else { momT[i] = mom2; momU[i] = mom1; } } else { //kid heterozygous and this if tree's a bitch if (dad1 == 0 && mom1 == 0) { //both missing dadT[i] = 0; dadU[i] = 0; momT[i] = 0; momU[i] = 0; } else if (dad1 == 0 && mom1 != mom2) { //dad missing mom het dadT[i] = 0; dadU[i] = 0; momT[i] = (byte)(4+mom1); momU[i] = (byte)(4+mom2); } else if (mom1 == 0 && dad1 != dad2) { //dad het mom missing dadT[i] = (byte)(4+dad1); dadU[i] = (byte)(4+dad2); momT[i] = 0; momU[i] = 0; } else if (dad1 == 0 && mom1 == mom2) { //dad missing mom hom momT[i] = mom1; momU[i] = mom1; dadU[i] = 0; if (kid1 == mom1) { dadT[i] = kid2; } else { dadT[i] = kid1; } } else if (mom1 == 0 && dad1 == dad2) { //mom missing dad hom dadT[i] = dad1; dadU[i] = dad1; momU[i] = 0; if (kid1 == dad1) { momT[i] = kid2; } else { momT[i] = kid1; } } else if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadT[i] = dad1; dadU[i] = dad2; if (kid1 == dad1) { momT[i] = kid2; momU[i] = kid1; } else { momT[i] = kid1; momU[i] = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momT[i] = mom1; momU[i] = mom2; if (kid1 == mom1) { dadT[i] = kid2; dadU[i] = kid1; } else { dadT[i] = kid1; dadU[i] = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadT[i] = dad1; dadU[i] = dad1; momT[i] = mom1; momU[i] = mom1; } else { //everybody het dadT[i] = (byte)(4+dad1); dadU[i] = (byte)(4+dad2); momT[i] = (byte)(4+mom1); momU[i] = (byte)(4+mom2); } } } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momT, mom.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),momU, mom.getAffectedStatus(),currentInd.getAffectedStatus())); if(haploid) { chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadU, dad.getAffectedStatus(),currentInd.getAffectedStatus())); ((Chromosome)chrom.lastElement()).setHaploid(true); }else if(Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadT, dad.getAffectedStatus(), currentInd.getAffectedStatus())); ((Chromosome)chrom.lastElement()).setHaploid(true); }else { chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadU, dad.getAffectedStatus(),currentInd.getAffectedStatus())); chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),dadT, dad.getAffectedStatus(), currentInd.getAffectedStatus())); } numTrios++; indsInTrio.add(mom); indsInTrio.add(dad); indsInTrio.add(currentInd); } } } for (int x=0; x<indList.size(); x++){ currentInd = (Individual)indList.get(x); boolean haploid = ((currentInd.getGender() == 1) && Chromosome.getDataChrom().equalsIgnoreCase("chrx")); if (!indsInTrio.contains(currentInd)){ //ind has no parents or kids -- he's a singleton numMarkers = currentInd.getNumMarkers(); byte[] chrom1 = new byte[numMarkers]; byte[] chrom2 = new byte[numMarkers]; for (int i = 0; i < numMarkers; i++){ byte thisMarkerA, 
thisMarkerB; if (currentInd.getZeroed(i)){ thisMarkerA = 0; thisMarkerB = 0; }else{ thisMarkerA = currentInd.getAllele(i,0); thisMarkerB = currentInd.getAllele(i,1); } if (thisMarkerA == thisMarkerB || thisMarkerA == 0 || thisMarkerB == 0){ chrom1[i] = thisMarkerA; chrom2[i] = thisMarkerB; }else{ chrom1[i] = (byte)(4+thisMarkerA); chrom2[i] = (byte)(4+thisMarkerB); } } chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom1, currentInd.getAffectedStatus(), -1)); if(!haploid){ chrom.add(new Chromosome(currentInd.getFamilyID(),currentInd.getIndividualID(),chrom2,currentInd.getAffectedStatus(), -1)); }else{ ((Chromosome)chrom.lastElement()).setHaploid(true); } numSingletons++; } } chromosomes = chrom; //wipe clean any existing marker info so we know we're starting with a new file Chromosome.markers = null; return result; } |
logger.debug("dbchildrenadd. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; | if (logger.isDebugEnabled()) { logger.debug("dbChildrenInserted. source="+e.getSource() +" indices: "+Arrays.asList(e.getChangedIndices()) +" children: "+Arrays.asList(e.getChildren())); } | public void dbChildrenInserted(SQLObjectEvent e) { logger.debug("dbchildrenadd. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; if (logger.isDebugEnabled()) { if (e.getSQLSource() instanceof SQLRelationship) { SQLRelationship r = (SQLRelationship) e.getSQLSource(); logger.debug("dbChildrenInserted SQLObjectEvent: "+e +"; pk path="+Arrays.asList(getPkPathToRelationship(r)) +"; fk path="+Arrays.asList(getFkPathToRelationship(r))); } else { logger.debug("dbChildrenInserted SQLObjectEvent: "+e +"; tree path="+Arrays.asList(getPathToNode(e.getSQLSource()))); } } try { SQLObject[] newEventSources = e.getChildren(); for (int i = 0; i < newEventSources.length; i++) { ArchitectUtils.listenToHierarchy(this, newEventSources[i]); } } catch (ArchitectException ex) { logger.error("Error listening to added object", ex); } // relationships have two parents (pktable and fktable) so we need to fire two TMEs if (e.getSQLSource() instanceof SQLRelationship) { TreeModelEvent tme = new TreeModelEvent (this, getPkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); tme = new TreeModelEvent (this, getFkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); } else { TreeModelEvent tme = new TreeModelEvent (this, getPathToNode(e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); } } |
if ((!SwingUtilities.isEventDispatchThread()) && (!testMode)) { logger.debug("Not refiring because this is not the EDT."); return; } | public void dbChildrenInserted(SQLObjectEvent e) { logger.debug("dbchildrenadd. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; if (logger.isDebugEnabled()) { if (e.getSQLSource() instanceof SQLRelationship) { SQLRelationship r = (SQLRelationship) e.getSQLSource(); logger.debug("dbChildrenInserted SQLObjectEvent: "+e +"; pk path="+Arrays.asList(getPkPathToRelationship(r)) +"; fk path="+Arrays.asList(getFkPathToRelationship(r))); } else { logger.debug("dbChildrenInserted SQLObjectEvent: "+e +"; tree path="+Arrays.asList(getPathToNode(e.getSQLSource()))); } } try { SQLObject[] newEventSources = e.getChildren(); for (int i = 0; i < newEventSources.length; i++) { ArchitectUtils.listenToHierarchy(this, newEventSources[i]); } } catch (ArchitectException ex) { logger.error("Error listening to added object", ex); } // relationships have two parents (pktable and fktable) so we need to fire two TMEs if (e.getSQLSource() instanceof SQLRelationship) { TreeModelEvent tme = new TreeModelEvent (this, getPkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); tme = new TreeModelEvent (this, getFkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); } else { TreeModelEvent tme = new TreeModelEvent (this, getPathToNode(e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesInserted(tme); } } |
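The dbChildrenInserted rows above (and the matching listener rows below) replace a hard EDT bail-out with a guard that also honors a testMode flag, so unit tests can refire tree events off the event dispatch thread. A minimal sketch of that guard in isolation; the testMode field is an assumption mirrored from the rows, not part of any published API:

import javax.swing.SwingUtilities;

public abstract class EventDispatchGuard {
    // Set by unit tests that drive the model from a non-EDT thread.
    protected boolean testMode = false;

    /** True when it is safe to refire Swing model events from this thread. */
    protected boolean canRefire() {
        // Swing tree models must normally be mutated on the EDT only.
        return SwingUtilities.isEventDispatchThread() || testMode;
    }
}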
|
logger.debug("dbchildrenremove. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; | if (logger.isDebugEnabled()) { logger.debug("dbchildrenremoved. source="+e.getSource() +" indices: "+Arrays.asList(e.getChangedIndices()) +" children: "+Arrays.asList(e.getChildren())); } | public void dbChildrenRemoved(SQLObjectEvent e) { logger.debug("dbchildrenremove. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; if (logger.isDebugEnabled()) logger.debug("dbChildrenRemoved SQLObjectEvent: "+e); try { SQLObject[] oldEventSources = e.getChildren(); for (int i = 0; i < oldEventSources.length; i++) { ArchitectUtils.unlistenToHierarchy(this, oldEventSources[i]); } } catch (ArchitectException ex) { logger.error("Error unlistening to removed object", ex); } if (e.getSQLSource() instanceof SQLRelationship) { TreeModelEvent tme = new TreeModelEvent (this, getPkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); tme = new TreeModelEvent (this, getFkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); } else { TreeModelEvent tme = new TreeModelEvent (this, getPathToNode(e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); } } |
if ((!SwingUtilities.isEventDispatchThread()) && (!testMode)) { logger.debug("Not refiring because this is not the EDT."); return; } | public void dbChildrenRemoved(SQLObjectEvent e) { logger.debug("dbchildrenremove. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; if (logger.isDebugEnabled()) logger.debug("dbChildrenRemoved SQLObjectEvent: "+e); try { SQLObject[] oldEventSources = e.getChildren(); for (int i = 0; i < oldEventSources.length; i++) { ArchitectUtils.unlistenToHierarchy(this, oldEventSources[i]); } } catch (ArchitectException ex) { logger.error("Error unlistening to removed object", ex); } if (e.getSQLSource() instanceof SQLRelationship) { TreeModelEvent tme = new TreeModelEvent (this, getPkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); tme = new TreeModelEvent (this, getFkPathToRelationship((SQLRelationship) e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); } else { TreeModelEvent tme = new TreeModelEvent (this, getPathToNode(e.getSQLSource()), e.getChangedIndices(), e.getChildren()); fireTreeNodesRemoved(tme); } } |
|
if (!SwingUtilities.isEventDispatchThread()) return; | if ((!SwingUtilities.isEventDispatchThread()) && (!testMode)) { logger.debug("Not refiring because this is not the EDT."); return; } | public void dbObjectChanged(SQLObjectEvent e) { logger.debug("dbObjectChanged. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; if (logger.isDebugEnabled()) logger.debug("dbObjectChanged SQLObjectEvent: "+e); if (e.getPropertyName().equals("name") && !e.getNewValue().equals(e.getSQLSource().getName()) ) { logger.error("Name change event has wrong new value. new="+e.getNewValue()+"; real="+e.getSQLSource().getName()); } SQLObject source = e.getSQLSource(); if (source instanceof SQLRelationship) { SQLRelationship r = (SQLRelationship) source; fireTreeNodesChanged(new TreeModelEvent(this, getPkPathToRelationship(r))); fireTreeNodesChanged(new TreeModelEvent(this, getFkPathToRelationship(r))); } else { fireTreeNodesChanged(new TreeModelEvent(this, getPathToNode(source))); } } |
if (!SwingUtilities.isEventDispatchThread()) return; | public void dbStructureChanged(SQLObjectEvent e) { logger.debug("dbStructureChanged. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; try { ArchitectUtils.listenToHierarchy(this, e.getSQLSource()); TreeModelEvent tme = new TreeModelEvent(this, getPathToNode(e.getSQLSource())); fireTreeStructureChanged(tme); } catch (ArchitectException ex) { logger.error("Couldn't listen to hierarchy rooted at "+e.getSQLSource(), ex); } } |
|
TreeModelEvent tme = new TreeModelEvent(this, getPathToNode(e.getSQLSource())); fireTreeStructureChanged(tme); | public void dbStructureChanged(SQLObjectEvent e) { logger.debug("dbStructureChanged. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; try { ArchitectUtils.listenToHierarchy(this, e.getSQLSource()); TreeModelEvent tme = new TreeModelEvent(this, getPathToNode(e.getSQLSource())); fireTreeStructureChanged(tme); } catch (ArchitectException ex) { logger.error("Couldn't listen to hierarchy rooted at "+e.getSQLSource(), ex); } } |
|
if ((!SwingUtilities.isEventDispatchThread()) && (!testMode)) { logger.debug("Not refiring because this is not the EDT."); return; } TreeModelEvent tme = new TreeModelEvent(this, getPathToNode(e.getSQLSource())); fireTreeStructureChanged(tme); | public void dbStructureChanged(SQLObjectEvent e) { logger.debug("dbStructureChanged. source="+e.getSource()); if (!SwingUtilities.isEventDispatchThread()) return; try { ArchitectUtils.listenToHierarchy(this, e.getSQLSource()); TreeModelEvent tme = new TreeModelEvent(this, getPathToNode(e.getSQLSource())); fireTreeStructureChanged(tme); } catch (ArchitectException ex) { logger.error("Couldn't listen to hierarchy rooted at "+e.getSQLSource(), ex); } } |
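The dbStructureChanged rows above keep the same two-step shape once the EDT guard passes: re-attach listeners to the changed subtree, then fire a structure-changed event for its path. Condensed from the context above (logger, getPathToNode, and fireTreeStructureChanged belong to the tree model class shown in the rows):

public void dbStructureChanged(SQLObjectEvent e) {
    if ((!SwingUtilities.isEventDispatchThread()) && (!testMode)) return;
    try {
        // Re-listen first: the changed subtree may contain brand-new children.
        ArchitectUtils.listenToHierarchy(this, e.getSQLSource());
        fireTreeStructureChanged(new TreeModelEvent(this, getPathToNode(e.getSQLSource())));
    } catch (ArchitectException ex) {
        logger.error("Couldn't listen to hierarchy rooted at " + e.getSQLSource(), ex);
    }
}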
|
SQLObject[] path = new SQLObject[pathToFkTable.length + 1]; | SQLObject[] path = new SQLObject[pathToFkTable.length + 2]; | public SQLObject[] getFkPathToRelationship(SQLRelationship rel) { SQLObject[] pathToFkTable = getPathToNode(rel.getFkTable()); SQLObject[] path = new SQLObject[pathToFkTable.length + 1]; System.arraycopy(pathToFkTable, 0, path, 0, pathToFkTable.length); path[path.length - 1] = rel; return path; } |
path[path.length - 2] = rel.getFkTable().getImportedKeysFolder(); | public SQLObject[] getFkPathToRelationship(SQLRelationship rel) { SQLObject[] pathToFkTable = getPathToNode(rel.getFkTable()); SQLObject[] path = new SQLObject[pathToFkTable.length + 1]; System.arraycopy(pathToFkTable, 0, path, 0, pathToFkTable.length); path[path.length - 1] = rel; return path; } |
|
SQLObject[] path = new SQLObject[pathToPkTable.length + 1]; | SQLObject[] path = new SQLObject[pathToPkTable.length + 2]; | public SQLObject[] getPkPathToRelationship(SQLRelationship rel) { SQLObject[] pathToPkTable = getPathToNode(rel.getPkTable()); SQLObject[] path = new SQLObject[pathToPkTable.length + 1]; System.arraycopy(pathToPkTable, 0, path, 0, pathToPkTable.length); path[path.length - 1] = rel; return path; } |
path[path.length - 1] = rel; | path[path.length - 2] = rel.getPkTable().getExportedKeysFolder(); path[path.length - 1] = rel; | public SQLObject[] getPkPathToRelationship(SQLRelationship rel) { SQLObject[] pathToPkTable = getPathToNode(rel.getPkTable()); SQLObject[] path = new SQLObject[pathToPkTable.length + 1]; System.arraycopy(pathToPkTable, 0, path, 0, pathToPkTable.length); path[path.length - 1] = rel; return path; } |
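Both path-builder rows above grow the result array by two instead of one, so the relationship's containing folder sits between the table and the relationship itself. The corrected construction, as recorded in the rows:

public SQLObject[] getPkPathToRelationship(SQLRelationship rel) {
    SQLObject[] pathToPkTable = getPathToNode(rel.getPkTable());
    // Two extra slots: the exported-keys folder, then the relationship node.
    SQLObject[] path = new SQLObject[pathToPkTable.length + 2];
    System.arraycopy(pathToPkTable, 0, path, 0, pathToPkTable.length);
    path[path.length - 2] = rel.getPkTable().getExportedKeysFolder();
    path[path.length - 1] = rel;
    return path;
}

Without the folder element the tree path skips a level, so events fired against it cannot match any visible node.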
if (font == null) { logger.error("getPreferredSize(): Null font in TablePane "+c); logger.error("getPreferredSize(): TablePane's parent is "+c.getParent()); | FontRenderContext frc = c.getRecentFontRenderContext(); if (font == null || frc == null) { logger.error("getPreferredSize(): TablePane is missing font or fontRenderContext."); logger.error("getPreferredSize(): component="+c.getName()+"; font="+font+"; frc="+frc); | public Dimension getPreferredSize(JComponent jc) { TablePane c = (TablePane) jc; SQLTable table = c.getModel(); if (table == null) return null; int height = 0; int width = 0; try { Insets insets = c.getInsets(); java.util.List columnList = table.getColumns(); int cols = columnList.size(); Font font = c.getFont(); if (font == null) { logger.error("getPreferredSize(): Null font in TablePane "+c); logger.error("getPreferredSize(): TablePane's parent is "+c.getParent()); return null; } FontMetrics metrics = c.getFontMetrics(font); int fontHeight = metrics.getHeight(); height = insets.top + fontHeight + gap + c.getMargin().top + cols*fontHeight + boxLineThickness*2 + c.getMargin().bottom + insets.bottom; width = minimumWidth; logger.debug("starting width is: " + width); Iterator columnIt = table.getColumns().iterator(); while (columnIt.hasNext()) { String theColumn = columnIt.next().toString(); width = Math.max(width, metrics.stringWidth(theColumn)); logger.debug("new width is: " + width); } width += insets.left + c.getMargin().left + boxLineThickness*2 + c.getMargin().right + insets.right; } catch (ArchitectException e) { logger.warn("BasicTablePaneUI.getPreferredSize failed due to", e); width = 100; height = 100; } return new Dimension(width, height); } |
width = Math.max(width, metrics.stringWidth(theColumn)); | width = Math.max(width, (int) font.getStringBounds(theColumn, frc).getWidth()); | public Dimension getPreferredSize(JComponent jc) { TablePane c = (TablePane) jc; SQLTable table = c.getModel(); if (table == null) return null; int height = 0; int width = 0; try { Insets insets = c.getInsets(); java.util.List columnList = table.getColumns(); int cols = columnList.size(); Font font = c.getFont(); if (font == null) { logger.error("getPreferredSize(): Null font in TablePane "+c); logger.error("getPreferredSize(): TablePane's parent is "+c.getParent()); return null; } FontMetrics metrics = c.getFontMetrics(font); int fontHeight = metrics.getHeight(); height = insets.top + fontHeight + gap + c.getMargin().top + cols*fontHeight + boxLineThickness*2 + c.getMargin().bottom + insets.bottom; width = minimumWidth; logger.debug("starting width is: " + width); Iterator columnIt = table.getColumns().iterator(); while (columnIt.hasNext()) { String theColumn = columnIt.next().toString(); width = Math.max(width, metrics.stringWidth(theColumn)); logger.debug("new width is: " + width); } width += insets.left + c.getMargin().left + boxLineThickness*2 + c.getMargin().right + insets.right; } catch (ArchitectException e) { logger.warn("BasicTablePaneUI.getPreferredSize failed due to", e); width = 100; height = 100; } return new Dimension(width, height); } |
tp.setRecentFontRenderContext(g2.getFontRenderContext()); | public void paint(Graphics g, JComponent c) { TablePane tp = (TablePane) c; try { Graphics2D g2 = (Graphics2D) g; if (logger.isDebugEnabled()) { Rectangle clip = g2.getClipBounds(); if (clip != null) { g2.setColor(Color.red); clip.width--; clip.height--; g2.draw(clip); g2.setColor(tp.getForeground()); logger.debug("Clipping region: "+g2.getClip()); } else { logger.debug("Null clipping region"); } } // We don't want to paint inside the insets or borders. Insets insets = c.getInsets(); g.translate(insets.left, insets.top); int width = c.getWidth() - insets.left - insets.right; int height = c.getHeight() - insets.top - insets.bottom; Font font = c.getFont(); if (font == null) { logger.error("paint(): Null font in TablePane "+c); logger.error("paint(): TablePane's parent is "+c.getParent()); if (c.getParent() != null) { logger.error("paint(): parent font is "+c.getParent().getFont()); } return; } FontMetrics metrics = c.getFontMetrics(font); int fontHeight = metrics.getHeight(); int ascent = metrics.getAscent(); int maxDescent = metrics.getMaxDescent(); int y = 0; // hilight title if table is selected if (tp.selected == true) { g2.setColor(selectedColor); } else { g2.setColor(unselectedColor); } g2.fillRect(0, 0, c.getWidth(), fontHeight); g2.setColor(c.getForeground()); // print table name g2.drawString(tablePane.getModel().getTableName(), 0, y += ascent); // draw box around columns if (fontHeight < 0) { throw new IllegalStateException("FontHeight is negative"); } g2.drawRect(0, fontHeight+gap, width-boxLineThickness, height-(fontHeight+gap+boxLineThickness)); y += gap + boxLineThickness + tp.getMargin().top; // print columns Iterator colNameIt = tablePane.getModel().getColumns().iterator(); int i = 0; int hwidth = width-tp.getMargin().right-tp.getMargin().left-boxLineThickness*2; boolean stillNeedPKLine = true; while (colNameIt.hasNext()) { if (tp.isColumnSelected(i)) { logger.debug("Column "+i+" is selected"); g2.setColor(selectedColor); g2.fillRect(boxLineThickness+tp.getMargin().left, y-ascent+fontHeight, hwidth, fontHeight); g2.setColor(tp.getForeground()); } SQLColumn col = (SQLColumn) colNameIt.next(); if (col.getPrimaryKeySeq() == null && stillNeedPKLine) { stillNeedPKLine = false; g2.drawLine(0, y+maxDescent, width-1, y+maxDescent); } g2.drawString(col.getShortDisplayName(), boxLineThickness+tp.getMargin().left, y += fontHeight); i++; } // paint insertion point int ip = tablePane.getInsertionPoint(); if (ip != TablePane.COLUMN_INDEX_NONE) { y = gap + boxLineThickness + tp.getMargin().top + ((ip+1) * fontHeight); g2.drawLine(5, y, width - 6, y); g2.drawLine(2, y-3, 5, y); g2.drawLine(2, y+3, 5, y); g2.drawLine(width - 3, y-3, width - 6, y); g2.drawLine(width - 3, y+3, width - 6, y); } g.translate(-insets.left, -insets.top); } catch (ArchitectException e) { logger.warn("BasicTablePaneUI.paint failed", e); } } |
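The TablePane rows above cache the Graphics2D's FontRenderContext during paint so that getPreferredSize can measure strings with Font.getStringBounds instead of FontMetrics.stringWidth. A self-contained sketch of that measurement (the class and method names here are illustrative only):

import java.awt.Font;
import java.awt.font.FontRenderContext;

public final class TextMeasure {
    /** Pixel width of s when rendered with font under the given render context. */
    public static int widthOf(String s, Font font, FontRenderContext frc) {
        // getStringBounds respects the actual render context (antialiasing,
        // fractional metrics), which plain FontMetrics does not.
        return (int) font.getStringBounds(s, frc).getWidth();
    }
}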
|
output.append("<div id=\"" + getId() + "\">"); | public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); // wrap with div tag output.append("<div id=\"" + getId() + "\">"); output.append(component.draw(context)); output.append("</div>"); // append script String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
|
output.append("</div>"); | public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); // wrap with div tag output.append("<div id=\"" + getId() + "\">"); output.append(component.draw(context)); output.append("</div>"); // append script String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
|
output.append(dashboardId); output.append("'', ''"); | public String draw(DashboardContext context) { StringBuffer output = new StringBuffer(); // wrap with div tag output.append("<div id=\"" + getId() + "\">"); output.append(component.draw(context)); output.append("</div>"); // append script String appId = context.getWebContext().getApplicationConfig().getApplicationId(); output.append("\n<script>"); output.append("self.setTimeout(\"refreshDBComponent("); output.append("''"); output.append(getId()); output.append("'', "); output.append(refreshInterval + ", " + appId + ")\", " + refreshInterval + ");"); output.append("</script>"); return output.toString(); }
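The draw rows above wrap the inner component in a div keyed by the component id and append a self.setTimeout call so the browser refreshes only that div. A simplified sketch of the same idea; refreshDBComponent is the page-side JavaScript function the rows assume, and the doubled single quotes are kept verbatim from the rows (they survive a later round of quote processing):

public String drawRefreshingDiv(String id, String inner, int refreshMillis, String appId) {
    StringBuffer out = new StringBuffer();
    out.append("<div id=\"").append(id).append("\">");
    out.append(inner);
    out.append("</div>");
    // Schedule a partial refresh of just this div.
    out.append("\n<script>");
    out.append("self.setTimeout(\"refreshDBComponent(''").append(id)
       .append("'', ").append(refreshMillis).append(", ").append(appId)
       .append(")\", ").append(refreshMillis).append(");");
    out.append("</script>");
    return out.toString();
}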
|
if(roles == null){ roles = new ArrayList(); } | public List getRoles() { return roles; } |
|
if (source.physicalRadio.isSelected()) { SQLCatalog cat = (SQLCatalog) source.catalogDropdown.getSelectedItem(); SQLSchema sch = (SQLSchema) source.schemaDropdown.getSelectedItem(); gen.setTargetCatalog(cat == null ? null : cat.getPhysicalName()); gen.setTargetSchema(sch == null ? null : sch.getPhysicalName()); } | public void actionPerformed(ActionEvent e) { startCompareAction.setEnabled(false); copySettingsToProject(); // XXX: should do most or all of this work in a worker thread final CompareSQL sourceComp; final CompareSQL targetComp; final SQLObject left; final SQLObject right; try { left = source.getObjectToCompare(); if (left.getChildType() == SQLTable.class) { sourceTables = left.getChildren(); } else { sourceTables = new ArrayList(); } right = target.getObjectToCompare(); if (right.getChildType() == SQLTable.class) { targetTables = right.getChildren(); } else { targetTables = new ArrayList(); } sourceComp = new CompareSQL(sourceTables, targetTables); targetComp = new CompareSQL(targetTables, sourceTables); } catch (FileNotFoundException ex) { JOptionPane.showMessageDialog( ArchitectFrame.getMainInstance(), "File not found: "+ex.getMessage()); logger.error("File could not be found.", ex); return; } catch (IOException ex) { JOptionPane.showMessageDialog( ArchitectFrame.getMainInstance(), "Could not read file: "+ex.getMessage()); logger.error("Could not read file", ex); return; } catch (ArchitectException ex) { ASUtils.showExceptionDialog(CompareDMPanel.this, "Could not begin diff process", ex); return; } ArchitectSwingWorker compareWorker = new ArchitectSwingWorker() { private List<DiffChunk<SQLObject>> diff; private List<DiffChunk<SQLObject>> diff1; public void doStuff() throws ArchitectException { diff = sourceComp.generateTableDiffs(); diff1 = targetComp.generateTableDiffs(); } public void cleanup() { logger.debug("cleanup starts"); try { DefaultStyledDocument sourceDoc = new DefaultStyledDocument(); DefaultStyledDocument targetDoc = new DefaultStyledDocument(); DDLGenerator gen =(DDLGenerator)((Class)((LabelValueBean) sqlTypeDropdown.getSelectedItem()).getValue()).newInstance(); final Map<DiffType, AttributeSet> styles = new HashMap<DiffType, AttributeSet>(); { SimpleAttributeSet att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.red); styles.put(DiffType.LEFTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.green.darker().darker()); styles.put(DiffType.RIGHTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.black); styles.put(DiffType.SAME, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.orange); styles.put(DiffType.MODIFIED, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.blue); styles.put(DiffType.KEY_CHANGED, att); } if (sqlButton.isSelected()) { List<DiffChunk<SQLObject>> addRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> dropRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> nonRelationship = new ArrayList<DiffChunk<SQLObject>> (); for (DiffChunk d : diff) { if (d.getData() instanceof SQLRelationship) { if (d.getType() == DiffType.LEFTONLY) { dropRelationships.add(d); } else if (d.getType() == DiffType.RIGHTONLY) { addRelationships.add(d); } } else { nonRelationship.add(d); } } sqlScriptGenerator(styles, dropRelationships, gen); sqlScriptGenerator(styles, nonRelationship, gen); sqlScriptGenerator(styles, addRelationships, gen); } else if 
(englishButton.isSelected()) { generateEnglishDescription(styles, diff, sourceDoc); generateEnglishDescription(styles, diff1, targetDoc); } else { throw new IllegalStateException( "Don't know what type of output to make"); } // get the title string for the compareDMFrame if (sqlButton.isSelected()) { String titleString = "Comparing " + left.getName() + " to " + right.getName() + " using SQL"; SQLDatabase db = null; if ( source.loadRadio.isSelected() ) db = null; else if (source.playPenRadio.isSelected()) db = ArchitectFrame.getMainInstance().playpen.getDatabase(); else db = source.getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(ArchitectFrame.getMainInstance(), "Compare DM", titleString, false, gen, db == null?null:db.getDataSource(), false); ssd.setVisible(true); } else { CompareDMFrame cf = new CompareDMFrame(sourceDoc, targetDoc, left.getName(),right.getName()); cf.pack(); cf.setVisible(true); } } catch (ArchitectDiffException ex) { JOptionPane.showMessageDialog(CompareDMPanel.this, "Could not perform the diff:\n" + ex.getMessage(), "Diff Error", JOptionPane.ERROR_MESSAGE); logger.error("Couldn't do diff", ex); } catch (ArchitectException exp) { ASUtils.showExceptionDialog("StartCompareAction failed", exp); logger.error("StartCompareAction failed", exp); } catch (BadLocationException ex) { ASUtils.showExceptionDialog( "Could not create document for results", ex); logger.error("Could not create document for results", ex); } catch (Exception ex) { ASUtils.showExceptionDialog("Unxepected Exception!", ex); logger.error("Unxepected Exception!", ex); } finally { startCompareAction.setEnabled(isStartable()); } logger.debug("cleanup finished"); } }; new Thread(compareWorker).start(); new ProgressWatcher(progressBar,sourceComp); } |
|
if (logger.isDebugEnabled()) logger.debug(d); | public void actionPerformed(ActionEvent e) { startCompareAction.setEnabled(false); copySettingsToProject(); // XXX: should do most or all of this work in a worker thread final CompareSQL sourceComp; final CompareSQL targetComp; final SQLObject left; final SQLObject right; try { left = source.getObjectToCompare(); if (left.getChildType() == SQLTable.class) { sourceTables = left.getChildren(); } else { sourceTables = new ArrayList(); } right = target.getObjectToCompare(); if (right.getChildType() == SQLTable.class) { targetTables = right.getChildren(); } else { targetTables = new ArrayList(); } sourceComp = new CompareSQL(sourceTables, targetTables); targetComp = new CompareSQL(targetTables, sourceTables); } catch (FileNotFoundException ex) { JOptionPane.showMessageDialog( ArchitectFrame.getMainInstance(), "File not found: "+ex.getMessage()); logger.error("File could not be found.", ex); return; } catch (IOException ex) { JOptionPane.showMessageDialog( ArchitectFrame.getMainInstance(), "Could not read file: "+ex.getMessage()); logger.error("Could not read file", ex); return; } catch (ArchitectException ex) { ASUtils.showExceptionDialog(CompareDMPanel.this, "Could not begin diff process", ex); return; } ArchitectSwingWorker compareWorker = new ArchitectSwingWorker() { private List<DiffChunk<SQLObject>> diff; private List<DiffChunk<SQLObject>> diff1; public void doStuff() throws ArchitectException { diff = sourceComp.generateTableDiffs(); diff1 = targetComp.generateTableDiffs(); } public void cleanup() { logger.debug("cleanup starts"); try { DefaultStyledDocument sourceDoc = new DefaultStyledDocument(); DefaultStyledDocument targetDoc = new DefaultStyledDocument(); DDLGenerator gen =(DDLGenerator)((Class)((LabelValueBean) sqlTypeDropdown.getSelectedItem()).getValue()).newInstance(); final Map<DiffType, AttributeSet> styles = new HashMap<DiffType, AttributeSet>(); { SimpleAttributeSet att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.red); styles.put(DiffType.LEFTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.green.darker().darker()); styles.put(DiffType.RIGHTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.black); styles.put(DiffType.SAME, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.orange); styles.put(DiffType.MODIFIED, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.blue); styles.put(DiffType.KEY_CHANGED, att); } if (sqlButton.isSelected()) { List<DiffChunk<SQLObject>> addRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> dropRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> nonRelationship = new ArrayList<DiffChunk<SQLObject>> (); for (DiffChunk d : diff) { if (d.getData() instanceof SQLRelationship) { if (d.getType() == DiffType.LEFTONLY) { dropRelationships.add(d); } else if (d.getType() == DiffType.RIGHTONLY) { addRelationships.add(d); } } else { nonRelationship.add(d); } } sqlScriptGenerator(styles, dropRelationships, gen); sqlScriptGenerator(styles, nonRelationship, gen); sqlScriptGenerator(styles, addRelationships, gen); } else if (englishButton.isSelected()) { generateEnglishDescription(styles, diff, sourceDoc); generateEnglishDescription(styles, diff1, targetDoc); } else { throw new IllegalStateException( "Don't know what type of output to make"); } // get the title string for the compareDMFrame if 
(sqlButton.isSelected()) { String titleString = "Comparing " + left.getName() + " to " + right.getName() + " using SQL"; SQLDatabase db = null; if ( source.loadRadio.isSelected() ) db = null; else if (source.playPenRadio.isSelected()) db = ArchitectFrame.getMainInstance().playpen.getDatabase(); else db = source.getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(ArchitectFrame.getMainInstance(), "Compare DM", titleString, false, gen, db == null?null:db.getDataSource(), false); ssd.setVisible(true); } else { CompareDMFrame cf = new CompareDMFrame(sourceDoc, targetDoc, left.getName(),right.getName()); cf.pack(); cf.setVisible(true); } } catch (ArchitectDiffException ex) { JOptionPane.showMessageDialog(CompareDMPanel.this, "Could not perform the diff:\n" + ex.getMessage(), "Diff Error", JOptionPane.ERROR_MESSAGE); logger.error("Couldn't do diff", ex); } catch (ArchitectException exp) { ASUtils.showExceptionDialog("StartCompareAction failed", exp); logger.error("StartCompareAction failed", exp); } catch (BadLocationException ex) { ASUtils.showExceptionDialog( "Could not create document for results", ex); logger.error("Could not create document for results", ex); } catch (Exception ex) { ASUtils.showExceptionDialog("Unxepected Exception!", ex); logger.error("Unxepected Exception!", ex); } finally { startCompareAction.setEnabled(isStartable()); } logger.debug("cleanup finished"); } }; new Thread(compareWorker).start(); new ProgressWatcher(progressBar,sourceComp); } |
|
if (source.physicalRadio.isSelected()) { SQLCatalog cat = (SQLCatalog) source.catalogDropdown.getSelectedItem(); SQLSchema sch = (SQLSchema) source.schemaDropdown.getSelectedItem(); gen.setTargetCatalog(cat == null ? null : cat.getPhysicalName()); gen.setTargetSchema(sch == null ? null : sch.getPhysicalName()); } | public void cleanup() { logger.debug("cleanup starts"); try { DefaultStyledDocument sourceDoc = new DefaultStyledDocument(); DefaultStyledDocument targetDoc = new DefaultStyledDocument(); DDLGenerator gen =(DDLGenerator)((Class)((LabelValueBean) sqlTypeDropdown.getSelectedItem()).getValue()).newInstance(); final Map<DiffType, AttributeSet> styles = new HashMap<DiffType, AttributeSet>(); { SimpleAttributeSet att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.red); styles.put(DiffType.LEFTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.green.darker().darker()); styles.put(DiffType.RIGHTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.black); styles.put(DiffType.SAME, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.orange); styles.put(DiffType.MODIFIED, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.blue); styles.put(DiffType.KEY_CHANGED, att); } if (sqlButton.isSelected()) { List<DiffChunk<SQLObject>> addRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> dropRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> nonRelationship = new ArrayList<DiffChunk<SQLObject>> (); for (DiffChunk d : diff) { if (d.getData() instanceof SQLRelationship) { if (d.getType() == DiffType.LEFTONLY) { dropRelationships.add(d); } else if (d.getType() == DiffType.RIGHTONLY) { addRelationships.add(d); } } else { nonRelationship.add(d); } } sqlScriptGenerator(styles, dropRelationships, gen); sqlScriptGenerator(styles, nonRelationship, gen); sqlScriptGenerator(styles, addRelationships, gen); } else if (englishButton.isSelected()) { generateEnglishDescription(styles, diff, sourceDoc); generateEnglishDescription(styles, diff1, targetDoc); } else { throw new IllegalStateException( "Don't know what type of output to make"); } // get the title string for the compareDMFrame if (sqlButton.isSelected()) { String titleString = "Comparing " + left.getName() + " to " + right.getName() + " using SQL"; SQLDatabase db = null; if ( source.loadRadio.isSelected() ) db = null; else if (source.playPenRadio.isSelected()) db = ArchitectFrame.getMainInstance().playpen.getDatabase(); else db = source.getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(ArchitectFrame.getMainInstance(), "Compare DM", titleString, false, gen, db == null?null:db.getDataSource(), false); ssd.setVisible(true); } else { CompareDMFrame cf = new CompareDMFrame(sourceDoc, targetDoc, left.getName(),right.getName()); cf.pack(); cf.setVisible(true); } } catch (ArchitectDiffException ex) { JOptionPane.showMessageDialog(CompareDMPanel.this, "Could not perform the diff:\n" + ex.getMessage(), "Diff Error", JOptionPane.ERROR_MESSAGE); logger.error("Couldn't do diff", ex); } catch (ArchitectException exp) { ASUtils.showExceptionDialog("StartCompareAction failed", exp); logger.error("StartCompareAction failed", exp); } catch (BadLocationException ex) { ASUtils.showExceptionDialog( "Could not create document for results", ex); logger.error("Could not create document for results", ex); } catch (Exception ex) { ASUtils.showExceptionDialog("Unxepected 
Exception!", ex); logger.error("Unexpected Exception!", ex); } finally { startCompareAction.setEnabled(isStartable()); } logger.debug("cleanup finished"); }
|
if (logger.isDebugEnabled()) logger.debug(d); | public void cleanup() { logger.debug("cleanup starts"); try { DefaultStyledDocument sourceDoc = new DefaultStyledDocument(); DefaultStyledDocument targetDoc = new DefaultStyledDocument(); DDLGenerator gen =(DDLGenerator)((Class)((LabelValueBean) sqlTypeDropdown.getSelectedItem()).getValue()).newInstance(); final Map<DiffType, AttributeSet> styles = new HashMap<DiffType, AttributeSet>(); { SimpleAttributeSet att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.red); styles.put(DiffType.LEFTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.green.darker().darker()); styles.put(DiffType.RIGHTONLY, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.black); styles.put(DiffType.SAME, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.orange); styles.put(DiffType.MODIFIED, att); att = new SimpleAttributeSet(); StyleConstants.setForeground(att, Color.blue); styles.put(DiffType.KEY_CHANGED, att); } if (sqlButton.isSelected()) { List<DiffChunk<SQLObject>> addRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> dropRelationships = new ArrayList<DiffChunk<SQLObject>>(); List<DiffChunk<SQLObject>> nonRelationship = new ArrayList<DiffChunk<SQLObject>> (); for (DiffChunk d : diff) { if (d.getData() instanceof SQLRelationship) { if (d.getType() == DiffType.LEFTONLY) { dropRelationships.add(d); } else if (d.getType() == DiffType.RIGHTONLY) { addRelationships.add(d); } } else { nonRelationship.add(d); } } sqlScriptGenerator(styles, dropRelationships, gen); sqlScriptGenerator(styles, nonRelationship, gen); sqlScriptGenerator(styles, addRelationships, gen); } else if (englishButton.isSelected()) { generateEnglishDescription(styles, diff, sourceDoc); generateEnglishDescription(styles, diff1, targetDoc); } else { throw new IllegalStateException( "Don't know what type of output to make"); } // get the title string for the compareDMFrame if (sqlButton.isSelected()) { String titleString = "Comparing " + left.getName() + " to " + right.getName() + " using SQL"; SQLDatabase db = null; if ( source.loadRadio.isSelected() ) db = null; else if (source.playPenRadio.isSelected()) db = ArchitectFrame.getMainInstance().playpen.getDatabase(); else db = source.getDatabase(); SQLScriptDialog ssd = new SQLScriptDialog(ArchitectFrame.getMainInstance(), "Compare DM", titleString, false, gen, db == null?null:db.getDataSource(), false); ssd.setVisible(true); } else { CompareDMFrame cf = new CompareDMFrame(sourceDoc, targetDoc, left.getName(),right.getName()); cf.pack(); cf.setVisible(true); } } catch (ArchitectDiffException ex) { JOptionPane.showMessageDialog(CompareDMPanel.this, "Could not perform the diff:\n" + ex.getMessage(), "Diff Error", JOptionPane.ERROR_MESSAGE); logger.error("Couldn't do diff", ex); } catch (ArchitectException exp) { ASUtils.showExceptionDialog("StartCompareAction failed", exp); logger.error("StartCompareAction failed", exp); } catch (BadLocationException ex) { ASUtils.showExceptionDialog( "Could not create document for results", ex); logger.error("Could not create document for results", ex); } catch (Exception ex) { ASUtils.showExceptionDialog("Unxepected Exception!", ex); logger.error("Unxepected Exception!", ex); } finally { startCompareAction.setEnabled(isStartable()); } logger.debug("cleanup finished"); } |
|
gen.addPrimaryKey(t,t.getPrimaryKeyName()); | gen.addPrimaryKey(t); | private void sqlScriptGenerator(Map<DiffType, AttributeSet> styles, List<DiffChunk<SQLObject>> diff, DDLGenerator gen) throws ArchitectDiffException, SQLException, ArchitectException, BadLocationException, InstantiationException, IllegalAccessException { for (DiffChunk<SQLObject> chunk : diff) { if (chunk.getType() == DiffType.KEY_CHANGED) { if(chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); boolean hasKey = false; for (SQLColumn c : t.getColumns()) { if (c.isPrimaryKey()) { hasKey=true; break; } } if (hasKey) { gen.addPrimaryKey(t,t.getPrimaryKeyName()); } else { gen.dropPrimaryKey(t,t.getPrimaryKeyName()); } } }else if (chunk.getType() == DiffType.LEFTONLY) { if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.dropTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.dropColumn(c,c.getParentTable()); } else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.dropRelationship(r); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.RIGHTONLY){ if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.writeTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.addColumn(c,c.getParentTable()); }else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.addRelationship(r); }else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.MODIFIED) { if (chunk.getData() instanceof SQLColumn) { SQLColumn c = (SQLColumn) chunk.getData(); gen.modifyColumn(c); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else { } } } |
gen.dropPrimaryKey(t,t.getPrimaryKeyName()); | gen.dropPrimaryKey(t); | private void sqlScriptGenerator(Map<DiffType, AttributeSet> styles, List<DiffChunk<SQLObject>> diff, DDLGenerator gen) throws ArchitectDiffException, SQLException, ArchitectException, BadLocationException, InstantiationException, IllegalAccessException { for (DiffChunk<SQLObject> chunk : diff) { if (chunk.getType() == DiffType.KEY_CHANGED) { if(chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); boolean hasKey = false; for (SQLColumn c : t.getColumns()) { if (c.isPrimaryKey()) { hasKey=true; break; } } if (hasKey) { gen.addPrimaryKey(t,t.getPrimaryKeyName()); } else { gen.dropPrimaryKey(t,t.getPrimaryKeyName()); } } }else if (chunk.getType() == DiffType.LEFTONLY) { if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.dropTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.dropColumn(c,c.getParentTable()); } else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.dropRelationship(r); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.RIGHTONLY){ if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.writeTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.addColumn(c,c.getParentTable()); }else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.addRelationship(r); }else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.MODIFIED) { if (chunk.getData() instanceof SQLColumn) { SQLColumn c = (SQLColumn) chunk.getData(); gen.modifyColumn(c); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else { } } } |
gen.dropColumn(c,c.getParentTable()); | gen.dropColumn(c); | private void sqlScriptGenerator(Map<DiffType, AttributeSet> styles, List<DiffChunk<SQLObject>> diff, DDLGenerator gen) throws ArchitectDiffException, SQLException, ArchitectException, BadLocationException, InstantiationException, IllegalAccessException { for (DiffChunk<SQLObject> chunk : diff) { if (chunk.getType() == DiffType.KEY_CHANGED) { if(chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); boolean hasKey = false; for (SQLColumn c : t.getColumns()) { if (c.isPrimaryKey()) { hasKey=true; break; } } if (hasKey) { gen.addPrimaryKey(t,t.getPrimaryKeyName()); } else { gen.dropPrimaryKey(t,t.getPrimaryKeyName()); } } }else if (chunk.getType() == DiffType.LEFTONLY) { if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.dropTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.dropColumn(c,c.getParentTable()); } else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.dropRelationship(r); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.RIGHTONLY){ if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.writeTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.addColumn(c,c.getParentTable()); }else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.addRelationship(r); }else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.MODIFIED) { if (chunk.getData() instanceof SQLColumn) { SQLColumn c = (SQLColumn) chunk.getData(); gen.modifyColumn(c); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else { } } } |
gen.addColumn(c,c.getParentTable()); | gen.addColumn(c); | private void sqlScriptGenerator(Map<DiffType, AttributeSet> styles, List<DiffChunk<SQLObject>> diff, DDLGenerator gen) throws ArchitectDiffException, SQLException, ArchitectException, BadLocationException, InstantiationException, IllegalAccessException { for (DiffChunk<SQLObject> chunk : diff) { if (chunk.getType() == DiffType.KEY_CHANGED) { if(chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); boolean hasKey = false; for (SQLColumn c : t.getColumns()) { if (c.isPrimaryKey()) { hasKey=true; break; } } if (hasKey) { gen.addPrimaryKey(t,t.getPrimaryKeyName()); } else { gen.dropPrimaryKey(t,t.getPrimaryKeyName()); } } }else if (chunk.getType() == DiffType.LEFTONLY) { if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.dropTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.dropColumn(c,c.getParentTable()); } else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.dropRelationship(r); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.RIGHTONLY){ if (chunk.getData() instanceof SQLTable) { SQLTable t = (SQLTable) chunk.getData(); gen.writeTable(t); }else if (chunk.getData() instanceof SQLColumn){ SQLColumn c = (SQLColumn) chunk.getData(); gen.addColumn(c,c.getParentTable()); }else if (chunk.getData() instanceof SQLRelationship){ SQLRelationship r = (SQLRelationship)chunk.getData(); gen.addRelationship(r); }else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else if (chunk.getType() == DiffType.MODIFIED) { if (chunk.getData() instanceof SQLColumn) { SQLColumn c = (SQLColumn) chunk.getData(); gen.modifyColumn(c); } else { throw new IllegalStateException("DiffChunk is an unexpected type."); } } else { } } } |
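The four rows above migrate the diff dispatcher to DDL-generator methods that take a single object and derive the parent themselves (addPrimaryKey(t), dropColumn(c), and so on). A sketch of what the new signatures imply on the generator side; the method bodies are placeholders, and only the parent derivation is taken from the rows:

public abstract class SimplifiedDDLGenerator {
    /** New-style signature: the parent table comes from the column itself. */
    public void dropColumn(SQLColumn c) {
        SQLTable parent = c.getParentTable();
        // ... emit ALTER TABLE <parent> DROP COLUMN <c> for the target dialect
    }

    public void addColumn(SQLColumn c) {
        SQLTable parent = c.getParentTable();
        // ... emit ALTER TABLE <parent> ADD COLUMN <c> for the target dialect
    }
}

Dropping the redundant parameter removes the possibility of passing a column paired with a mismatched parent table.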
bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); | public void actionPerformed(ActionEvent evt) { // update the progress bar logger.debug("updating progress bar..."); try { Integer jobSize = monitorable.getJobSize(); if (bar != null) { if (monitorable.hasStarted()) { if (jobSize == null) { bar.setIndeterminate(true); } else { bar.setIndeterminate(false); bar.setMaximum(jobSize.intValue()); } bar.setVisible(true); bar.setValue(monitorable.getProgress()); bar.setIndeterminate(false); } } if (label != null) { label.setVisible(true); } if (pm != null) { // using ProgressMonitor if (monitorable.hasStarted()) { if (jobSize != null) { pm.setMaximum(jobSize.intValue()); } pm.setProgress(monitorable.getProgress()); logger.debug("progress: " + monitorable.getProgress()); pm.setNote(monitorable.getMessage()); } } } catch (ArchitectException e) { logger.error("Couldn't update progress bar (Monitorable threw an exception)", e); } finally { try { logger.debug("monitorable.isFinished():" + monitorable.isFinished()); if (monitorable.isFinished()) { if (label != null) { label.setText(""); } if (bar != null) { bar.setValue(0); } if (pm != null) { logger.debug("pm done, max was: " + pm.getMaximum()); pm.close(); } // fire a taskTerminationEvent fireTaskFinished(); logger.debug("trying to stop timer thread..."); timer.stop(); logger.debug("did the timer thread stop???"); } } catch (ArchitectException e1) { logger.error("Couldn't tell if Monitorable was finished (it threw an exception)", e1); } } } |
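The ProgressWatcher row above removes a redundant setIndeterminate(false) call that immediately followed the value update. The surrounding pattern, a javax.swing.Timer polling a Monitorable and mirroring it into a JProgressBar, is sketched below; the Monitorable accessors are taken from the row, the timer wiring is an assumption:

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JProgressBar;
import javax.swing.Timer;

public class ProgressPoller {
    public static Timer watch(final JProgressBar bar, final Monitorable job) {
        final Timer timer = new Timer(250, null);
        timer.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                try {
                    if (job.hasStarted()) {
                        Integer size = job.getJobSize();
                        bar.setIndeterminate(size == null); // unknown job size -> spin
                        if (size != null) bar.setMaximum(size.intValue());
                        bar.setValue(job.getProgress());
                    }
                    if (job.isFinished()) timer.stop();
                } catch (Exception ex) {
                    timer.stop(); // a failing Monitorable should not poll forever
                }
            }
        });
        timer.start();
        return timer;
    }
}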
|
System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); | public Vector calcTDT(Vector chromosomes) { int numChroms; Chromosome chromT, chromU,chromTemp; String ped,ind; Vector temp; numChroms = chromosomes.size(); temp = (Vector)chromosomes.clone(); chromosomes = temp; int numMarkers = Chromosome.getSize(); for(int k=0;k<numMarkers;k++){ this.results.add(new TDTResult(Chromosome.getMarker(k))); } for(int i=0;i<chromosomes.size()-3;i++){ Chromosome chrom1T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom1U = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2T = (Chromosome)chromosomes.get(i); i++; Chromosome chrom2U = (Chromosome)chromosomes.get(i); //System.out.println("ind1T: " + chrom1T.getPed() + "\t" + chrom1T.getIndividual() ); //System.out.println("ind1U: " + chrom1U.getPed() + "\t" + chrom1U.getIndividual() ); //System.out.println("ind2T: " + chrom2T.getPed() + "\t" + chrom2T.getIndividual() ); //System.out.println("ind2U: " + chrom2U.getPed() + "\t" + chrom2U.getIndividual() ); for(int j=0;j<numMarkers;j++){ byte allele1T = chrom1T.getGenotype(j); byte allele1U = chrom1U.getGenotype(j); byte allele2T = chrom2T.getGenotype(j); byte allele2U = chrom2U.getGenotype(j); TDTResult curRes = (TDTResult)results.get(j); //System.out.println("marker "+ j + ":\t " + allele1T + "\t" + allele1U + "\t" + allele2T + "\t" + allele2U); curRes.tallyInd(allele1T,allele1U); curRes.tallyInd(allele2T,allele2U); } } for(int i=0;i<this.results.size();i++){ TDTResult tempRes = (TDTResult)this.results.get(i); int[][] counts = tempRes.counts; System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); } return this.results; } |
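The calcTDT row above walks the chromosome list four entries at a time (transmitted and untransmitted pairs for both parents of a trio) and tallies each marker into a per-marker 2x2 counts table, which the removed println dumped to stdout. A sketch of one plausible tally step, assuming biallelic markers coded 1 and 2; the index mapping is an assumption, only the counts array shape comes from the row:

public class TDTResultSketch {
    // counts[t-1][u-1]: times allele t was transmitted while allele u was not
    int[][] counts = new int[2][2];

    void tallyInd(byte transmitted, byte untransmitted) {
        if (transmitted == 0 || untransmitted == 0) return; // missing data
        counts[transmitted - 1][untransmitted - 1]++;
    }
}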
|
public SQLCatalog(SQLDatabase parent, String name) { this.parent = parent; this.catalogName = name; this.children = new LinkedList(); | public SQLCatalog() { this(null, null); | public SQLCatalog(SQLDatabase parent, String name) { this.parent = parent; this.catalogName = name; this.children = new LinkedList(); } |
testRefImageDir = new File( "/home/harri/projects/photovault/tests/images/photovault/swingui/TestThumbnailView/" ); | super(); testRefImageDir = new File( "tests/images/photovault/swingui/TestThumbnailView/" ); | public Test_ThumbnailView() { testRefImageDir = new File( "/home/harri/projects/photovault/tests/images/photovault/swingui/TestThumbnailView/" ); testRefImageDir.mkdirs(); } |
protected void setUp() { | public void setUp() { super.setUp(); | protected void setUp() { // Create a frame with the test instance name as the title frame = new JFrame(getName()); pane = (JPanel)frame.getContentPane(); pane.setLayout(new FlowLayout()); pane.setBorder(new EmptyBorder(50, 50, 50, 50)); tester = ComponentTester.getTester(ThumbnailView.class); File f = new File(testImgDir, "test1.jpg" ); try { photo = PhotoInfo.addToDB( f ); } catch( PhotoNotFoundException e ) { fail( "error creating photo" ); } photo.setShootingPlace( "TESSTPLACE" ); } |
public void doTag(XMLOutput output) throws Exception | public void doTag(XMLOutput output) throws JellyTagException | public void doTag(XMLOutput output) throws Exception { output.write("output - ignored body"); } |
output.write("output - ignored body"); | try { output.write("output - ignored body"); } catch (SAXException e) { throw new JellyTagException(e); } | public void doTag(XMLOutput output) throws Exception { output.write("output - ignored body"); } |
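The two doTag rows above narrow the thrown type from Exception to JellyTagException and wrap the SAXException that XMLOutput.write can raise. A minimal complete tag showing the pattern (the class name and package are illustrative):

import org.apache.commons.jelly.JellyTagException;
import org.apache.commons.jelly.TagSupport;
import org.apache.commons.jelly.XMLOutput;
import org.xml.sax.SAXException;

public class OutputTag extends TagSupport {
    public void doTag(XMLOutput output) throws JellyTagException {
        try {
            output.write("output - ignored body");
        } catch (SAXException e) {
            // Checked SAX failures surface as Jelly tag failures.
            throw new JellyTagException(e);
        }
    }
}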
boolean a = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); boolean b = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); | assertEquals(OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"), result.getTotalOrdering()); | public void testComplexUnionPlan3() throws Exception { UnionQueryAnalyzer uqa = new UnionQueryAnalyzer(StorableTestBasic.class, TestIndexedQueryAnalyzer.RepoAccess.INSTANCE); Filter<StorableTestBasic> filter = Filter.filterFor (StorableTestBasic.class, "stringProp = ? | stringProp = ?"); filter = filter.bind(); UnionQueryAnalyzer.Result result = uqa.analyze(filter, null); boolean a = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); boolean b = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); QueryExecutor<StorableTestBasic> exec = result.createExecutor(); assertEquals(filter, exec.getFilter()); a = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); b = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); List<IndexedQueryAnalyzer<Shipment>.Result> subResults = result.getSubResults(); assertEquals(2, subResults.size()); StringBuffer buf = new StringBuffer(); exec.printPlan(buf, 0, null); String plan = buf.toString(); String expexted = "union\n" + " index scan: com.amazon.carbonado.stored.StorableTestBasic\n" + " ...index: {properties=[+stringProp, +doubleProp], unique=true}\n" + " ...identity filter: stringProp = ?\n" + " index scan: com.amazon.carbonado.stored.StorableTestBasic\n" + " ...index: {properties=[+stringProp, +doubleProp], unique=true}\n" + " ...identity filter: stringProp = ?[2]\n"; // Test test will fail if the format of the plan changes. assertEquals(expexted, plan); } |
assertEquals(filter, exec.getFilter()); a = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); b = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); | assertEquals(OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"), exec.getOrdering()); | public void testComplexUnionPlan3() throws Exception { UnionQueryAnalyzer uqa = new UnionQueryAnalyzer(StorableTestBasic.class, TestIndexedQueryAnalyzer.RepoAccess.INSTANCE); Filter<StorableTestBasic> filter = Filter.filterFor (StorableTestBasic.class, "stringProp = ? | stringProp = ?"); filter = filter.bind(); UnionQueryAnalyzer.Result result = uqa.analyze(filter, null); boolean a = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); boolean b = result.getTotalOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); QueryExecutor<StorableTestBasic> exec = result.createExecutor(); assertEquals(filter, exec.getFilter()); a = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+doubleProp", "+stringProp"); b = exec.getOrdering() == OrderingList.get(StorableTestBasic.class, "+stringProp", "+doubleProp"); assertTrue(a || b); List<IndexedQueryAnalyzer<Shipment>.Result> subResults = result.getSubResults(); assertEquals(2, subResults.size()); StringBuffer buf = new StringBuffer(); exec.printPlan(buf, 0, null); String plan = buf.toString(); String expexted = "union\n" + " index scan: com.amazon.carbonado.stored.StorableTestBasic\n" + " ...index: {properties=[+stringProp, +doubleProp], unique=true}\n" + " ...identity filter: stringProp = ?\n" + " index scan: com.amazon.carbonado.stored.StorableTestBasic\n" + " ...index: {properties=[+stringProp, +doubleProp], unique=true}\n" + " ...identity filter: stringProp = ?[2]\n"; // Test test will fail if the format of the plan changes. assertEquals(expexted, plan); } |
int pmMax = countSourceTables((SQLObject) sourceDatabases.getModel().getRoot()) + playPen.getPPComponentCount() * 2; logger.debug("Setting progress monitor maximum to "+pmMax); | if (savingEntireSource) { pmMax = ArchitectUtils.countTablesSnapshot((SQLObject) sourceDatabases.getModel().getRoot()); } else { pmMax = ArchitectUtils.countTables((SQLObject) sourceDatabases.getModel().getRoot()); } logger.error("Setting progress monitor maximum to "+pmMax); | public void save(ProgressMonitor pm) throws IOException, ArchitectException { // write to temp file and then rename (this preserves old project file // when there's problems) if (file.exists() && !file.canWrite()) { // write problems with architect file will muck up the save process throw new ArchitectException("problem saving project -- " + "cannot write to architect file: " + file.getAbsolutePath()); } File backupFile = new File (file.getParent(), file.getName()+"~"); File tempFile = new File (file.getParent(),"tmp___" + file.getName()); out = new PrintWriter(new BufferedWriter(new FileWriter(tempFile))); objectIdMap = new HashMap(); dbcsIdMap = new HashMap(); indent = 0; progress = 0; boolean saveOk = false; // use this to determine if save process worked this.pm = pm; if (pm != null) { pm.setMinimum(0); int pmMax = countSourceTables((SQLObject) sourceDatabases.getModel().getRoot()) + playPen.getPPComponentCount() * 2; logger.debug("Setting progress monitor maximum to "+pmMax); pm.setMaximum(pmMax); pm.setProgress(progress); pm.setMillisToDecideToPopup(0); } try { println("<?xml version=\"1.0\"?>"); println("<architect-project version=\"0.1\">"); indent++; println("<project-name>"+name+"</project-name>"); saveDataSources(); saveSourceDatabases(); saveTargetDatabase(); saveDDLGenerator(); savePlayPen(); indent--; println("</architect-project>"); setModified(false); saveOk = true; } finally { if (out != null) out.close(); out = null; if (pm != null) pm.close(); pm = null; } // do the rename dance if (saveOk) { boolean fstatus = false; fstatus = backupFile.delete(); logger.debug("deleting backup~ file: " + fstatus); fstatus = file.renameTo(backupFile); logger.debug("renaming current file to backupFile: " + fstatus); fstatus = tempFile.renameTo(file); logger.debug("renaming tempFile to current file: " + fstatus); } } |
try { ImageIO.write( exportImage, "jpg", file ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; | String ftype = "jpg"; String imageFname = file.getName(); int extIndex = imageFname.lastIndexOf( "." ) + 1; if ( extIndex > 0 ) { ftype = imageFname.substring( extIndex ); | public void exportPhoto( File file, int width, int height ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are no instances, nothing can be exported log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); AffineTransform xform = photovault.image. ImageXform.getFittingXform( width, height, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_BILINEAR ); BufferedImage exportImage = atOp.filter( origImage, null ); // Save it try { ImageIO.write( exportImage, "jpg", file ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } } |
try { ImageWriter writer = null; Iterator iter = ImageIO.getImageWritersByFormatName( ftype ); if (iter.hasNext()) writer = (ImageWriter)iter.next(); if (writer != null) { ImageOutputStream ios = null; try { ios = ImageIO.createImageOutputStream( file ); writer.setOutput(ios); ImageWriteParam param = writer.getDefaultWriteParam(); if (exportImage.getType() == BufferedImage.TYPE_INT_ARGB ) { ColorModel cm = new ComponentColorModel(exportImage.getColorModel().getColorSpace(), false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE); int[] bands = {0, 1, 2}; param.setSourceBands(bands); param.setDestinationType(new ImageTypeSpecifier(cm, cm.createCompatibleSampleModel(1, 1))); } writer.write(null, new IIOImage(exportImage, null, null), param); ios.flush(); } finally { if (ios != null) ios.close(); writer.dispose(); } } } catch ( IOException e ) { log.warn( "Error writing exported image: " + e.getMessage() ); txw.abort(); return; } | public void exportPhoto( File file, int width, int height ) { ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are no instances, nothing can be exported log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } // Read the image BufferedImage origImage = null; try { origImage = ImageIO.read( original.getImageFile() ); } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); AffineTransform xform = photovault.image. ImageXform.getFittingXform( width, height, prefRotation -original.getRotated(), origWidth, origHeight ); // Create the target image AffineTransformOp atOp = new AffineTransformOp( xform, AffineTransformOp.TYPE_BILINEAR ); BufferedImage exportImage = atOp.filter( origImage, null ); // Save it try { ImageIO.write( exportImage, "jpg", file ); } catch ( IOException e ) { log.warn( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } } |
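The two rows above replace a hardcoded ImageIO.write( exportImage, "jpg", file ) with a writer chosen from the target file's extension, plus a band-selection workaround so ARGB images can still be written as JPEG. A sketch of just the extension-to-writer lookup, using only the javax.imageio calls that appear in the row; the class name WriterLookup and the jpg fallback default are illustrative:

import java.io.File;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageWriter;

public class WriterLookup {
    // Derive the format name from the file extension, then ask ImageIO for
    // a matching writer instead of hardcoding "jpg".
    public static ImageWriter writerFor(File file) {
        String ftype = "jpg"; // fall back to jpg, as the added code does
        String name = file.getName();
        int extIndex = name.lastIndexOf(".") + 1;
        if (extIndex > 0) {
            ftype = name.substring(extIndex);
        }
        Iterator<ImageWriter> iter = ImageIO.getImageWritersByFormatName(ftype);
        return iter.hasNext() ? iter.next() : null;
    }

    public static void main(String[] args) {
        System.out.println(writerFor(new File("out.png")));
    }
}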
|
xml.append("\n <remarks>").append(ArchitectUtils.escapeXML(remarks)).append("</remarks>"); | public String toXML() { StringBuffer xml = new StringBuffer(); xml.append("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>"); xml.append("\n<architect-exception-report version=\"1.0\">"); xml.append("\n <exception class=\"").append(ArchitectUtils.escapeXML(exception.getClass().getName())).append("\">"); for (StackTraceElement ste : exception.getStackTrace()) { xml.append("\n <trace-element class=\"").append(ArchitectUtils.escapeXML(ste.getClassName())) .append("\" method=\"").append(ArchitectUtils.escapeXML(ste.getMethodName())) .append("\" file=\"").append(ArchitectUtils.escapeXML(ste.getFileName())) .append("\" line=\"").append(ste.getLineNumber()) .append("\" />"); } xml.append("\n </exception>"); xml.append("\n <architect-version>").append(ArchitectUtils.escapeXML(architectVersion)).append("</architect-version>"); xml.append("\n <architect-uptime>").append(applicationUptime).append("</architect-uptime>"); xml.append("\n <total-mem>").append(totalMem).append("</total-mem>"); xml.append("\n <free-mem>").append(freeMem).append("</free-mem>"); xml.append("\n <max-mem>").append(maxMem).append("</max-mem>"); xml.append("\n <jvm vendor=\"").append(ArchitectUtils.escapeXML(jvmVendor)).append("\" version=\"").append(ArchitectUtils.escapeXML(jvmVersion)).append("\" />"); xml.append("\n <os arch=\"").append(ArchitectUtils.escapeXML(osArch)).append("\" name=\"").append(ArchitectUtils.escapeXML(osName)).append("\" version=\"").append(ArchitectUtils.escapeXML(osVersion)).append("\" />"); xml.append("\n <num-source-connections>").append(numSourceConnections).append("</num-source-connections>"); xml.append("\n <num-objects-in-playpen>").append(numObjectsInPlayPen).append("</num-objects-in-playpen>"); xml.append("\n <user-activity-description>").append(ArchitectUtils.escapeXML(userActivityDescription)).append("</user-activity-description>"); xml.append("\n</architect-exception-report>"); xml.append("\n"); return xml.toString(); } |
|
ind = ((Family)families.get(((String[])order.elementAt(index))[0])).getMember( ((String[])order.elementAt(index))[1]); | ind = (Individual)order.elementAt(index); | public void parseHapMap(Vector rawLines) throws PedFileException { int colNum = -1; int numLines = rawLines.size(); Individual ind; this.order = new Vector(); //sort first Vector lines = new Vector(); Hashtable sortHelp = new Hashtable(numLines-1,1.0f); long[] pos = new long[numLines-1]; lines.add(rawLines.get(0)); for (int k = 1; k < numLines; k++){ StringTokenizer st = new StringTokenizer((String) rawLines.get(k)); //strip off 1st 3 cols st.nextToken();st.nextToken();st.nextToken(); pos[k-1] = new Long(st.nextToken()).longValue(); sortHelp.put(new Long(pos[k-1]),rawLines.get(k)); } Arrays.sort(pos); for (int i = 0; i < pos.length; i++){ lines.add(sortHelp.get(new Long(pos[i]))); } //enumerate indivs StringTokenizer st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); int numMetaColumns = 0; boolean doneMeta = false; while(!doneMeta && st.hasMoreTokens()){ String thisfield = st.nextToken(); numMetaColumns++; //so currently the first person ID always starts with NA (Coriell ID) but //todo: will this be true with AA samples etc? if (thisfield.startsWith("NA")){ doneMeta = true; } } numMetaColumns--; st = new StringTokenizer((String)lines.get(0), "\n\t\" \""); for (int i = 0; i < numMetaColumns; i++){ st.nextToken(); } StringTokenizer dt; while (st.hasMoreTokens()){ ind = new Individual(numLines); String name = st.nextToken(); String details = (String)hapMapTranslate.get(name); if (details == null){ throw new PedFileException("Hapmap data format error: " + name); } dt = new StringTokenizer(details, "\n\t\" \""); ind.setFamilyID(dt.nextToken().trim()); ind.setIndividualID(dt.nextToken().trim()); ind.setDadID(dt.nextToken().trim()); ind.setMomID(dt.nextToken().trim()); try { ind.setGender(Integer.parseInt(dt.nextToken().trim())); ind.setAffectedStatus(Integer.parseInt(dt.nextToken().trim())); }catch(NumberFormatException nfe) { throw new PedFileException("File error: invalid gender or affected status for indiv " + name); } ind.setIsTyped(true); //check if the family exists already in the Hashtable Family fam = (Family)this.families.get(ind.getFamilyID()); if(fam == null){ //it doesnt exist, so create a new Family object fam = new Family(ind.getFamilyID()); } fam.addMember(ind); this.families.put(ind.getFamilyID(),fam); String[] indFamID = new String[2]; indFamID[0] = ind.getFamilyID(); indFamID[1] = ind.getIndividualID(); this.order.add(ind); } //start at k=1 to skip header which we just processed above. hminfo = new String[numLines-1][]; for(int k=1;k<numLines;k++){ StringTokenizer tokenizer = new StringTokenizer((String)lines.get(k)); //reading the first line if(colNum < 0){ //only check column number count for the first line colNum = tokenizer.countTokens(); } if(colNum != tokenizer.countTokens()) { //this line has a different number of columns //should send some sort of error message //TODO: add something which stores number of markers for all lines and checks that they're consistent throw new PedFileException("Line number mismatch in input file. \nline " + (k+1)); } if(tokenizer.hasMoreTokens()){ hminfo[k-1] = new String[2]; for (int skip = 0; skip < numMetaColumns; skip++){ //meta-data crap String s = tokenizer.nextToken().trim(); //get marker name, chrom and pos if (skip == 0){ hminfo[k-1][0] = s; } if (skip == 2){ if (Chromosome.dataChrom != null){ if (!Chromosome.dataChrom.equals(s)){ throw new PedFileException("Hapmap file format error on line " + (k+1)+ ":\n There appear to be multiple chromosomes in the file."); } }else{ Chromosome.dataChrom = s; } } if (skip == 3){ hminfo[k-1][1] = s; } } int index = 0; while(tokenizer.hasMoreTokens()){ ind = ((Family)families.get(((String[])order.elementAt(index))[0])).getMember( ((String[])order.elementAt(index))[1]); String alleles = tokenizer.nextToken(); int allele1=0, allele2=0; if (alleles.substring(0,1).equals("A")){ allele1 = 1; }else if (alleles.substring(0,1).equals("C")){ allele1 = 2; }else if (alleles.substring(0,1).equals("G")){ allele1 = 3; }else if (alleles.substring(0,1).equals("T")){ allele1 = 4; } if (alleles.substring(1,2).equals("A")){ allele2 = 1; }else if (alleles.substring(1,2).equals("C")){ allele2 = 2; }else if (alleles.substring(1,2).equals("G")){ allele2 = 3; }else if (alleles.substring(1,2).equals("T")){ allele2 = 4; } byte[] markers = new byte[2]; markers[0] = (byte)allele1; markers[1]= (byte)allele2; ind.addMarker(markers); index++; } } } } |
alignedPositions[alignedPositions.length-1], boxSize); | alignedPositions[alignedPositions.length-1] + boxSize, boxSize); | public void mouseClicked(MouseEvent e) { if ((e.getModifiers() & InputEvent.BUTTON1_MASK) == InputEvent.BUTTON1_MASK) { int clickX = e.getX(); int clickY = e.getY(); if (showWM && wmInteriorRect.contains(clickX,clickY)){ //convert a click on the worldmap to a point on the big picture int bigClickX = (((clickX - getVisibleRect().x - (worldmap.getWidth()-wmInteriorRect.width)/2) * chartSize.width) / wmInteriorRect.width)-getVisibleRect().width/2; int bigClickY = (((clickY - getVisibleRect().y - (worldmap.getHeight() - wmInteriorRect.height)/2 - (getVisibleRect().height-worldmap.getHeight())) * chartSize.height) / wmInteriorRect.height) - getVisibleRect().height/2; //if the clicks are near the edges, correct values if (bigClickX > chartSize.width - getVisibleRect().width){ bigClickX = chartSize.width - getVisibleRect().width; } if (bigClickX < 0){ bigClickX = 0; } if (bigClickY > chartSize.height - getVisibleRect().height){ bigClickY = chartSize.height - getVisibleRect().height; } if (bigClickY < 0){ bigClickY = 0; } ((JViewport)getParent()).setViewPosition(new Point(bigClickX,bigClickY)); }else{ theHV.changeBlocks(BLOX_CUSTOM); Rectangle2D blockselector = new Rectangle2D.Double(clickXShift-boxRadius,clickYShift - boxRadius, alignedPositions[alignedPositions.length-1], boxSize); if(blockselector.contains(clickX,clickY)){ int whichMarker = getPreciseMarkerAt(clickX - clickXShift); if (whichMarker > -1){ if (theData.isInBlock[whichMarker]){ theData.removeFromBlock(whichMarker); repaint(); } else if (whichMarker > 0 && whichMarker < Chromosome.realIndex.length){ theData.addMarkerIntoSurroundingBlock(whichMarker); } } } } } } |
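The mouseClicked handler above corrects out-of-range view origins after mapping a worldmap click onto the big picture: each coordinate is pinned to [0, chartSize - visibleSize]. The clamp it performs inline, extracted as a sketch; the class name Clamp and the sample dimensions are illustrative:

public class Clamp {
    // Keep the computed view origin inside [0, max] on one axis.
    static int clamp(int value, int max) {
        if (value > max) return max;
        if (value < 0) return 0;
        return value;
    }

    public static void main(String[] args) {
        int chartWidth = 1000, visibleWidth = 300;
        System.out.println(clamp(850, chartWidth - visibleWidth)); // 700
        System.out.println(clamp(-20, chartWidth - visibleWidth)); // 0
    }
}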
for (int m = 0; m < 3; m++) { g.drawImage(blackNumImages[(m == 0) ? 10 : multiChars[m]-'0'], left + (LINE_SPAN - 7*CHAR_WIDTH/2)/2 + m*CHAR_WIDTH, | if (multidprimeArray[i] > 0.99){ g.drawImage(blackNumImages[1], left + (LINE_SPAN - 7*CHAR_WIDTH/2)/2 + CHAR_WIDTH, | public void paintComponent(Graphics graphics) { if (filteredHaplos == null){ super.paintComponent(graphics); return; } Graphics2D g = (Graphics2D) graphics; g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //System.out.println(getSize()); Dimension size = getSize(); Dimension pref = getPreferredSize(); g.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); //g.drawRect(0, 0, pref.width, pref.height); final BasicStroke thinStroke = new BasicStroke(0.5f); final BasicStroke thickStroke = new BasicStroke(2.0f); // width of one letter of the haplotype block //int letterWidth = haploMetrics.charWidth('G'); //int percentWidth = pctMetrics.stringWidth(".000"); //final int verticalOffset = 43; // room for tags and diamonds int left = BORDER; int top = BORDER; //verticalOffset; //int totalWidth = 0; // percentages for each haplotype NumberFormat nf = NumberFormat.getInstance(); nf.setMinimumFractionDigits(3); nf.setMaximumFractionDigits(3); nf.setMinimumIntegerDigits(0); nf.setMaximumIntegerDigits(0); // multi reading, between the columns NumberFormat nfMulti = NumberFormat.getInstance(); nfMulti.setMinimumFractionDigits(2); nfMulti.setMaximumFractionDigits(2); nfMulti.setMinimumIntegerDigits(0); nfMulti.setMaximumIntegerDigits(0); int[][] lookupPos = new int[filteredHaplos.length][]; for (int p = 0; p < lookupPos.length; p++) { lookupPos[p] = new int[filteredHaplos[p].length]; for (int q = 0; q < lookupPos[p].length; q++){ lookupPos[p][filteredHaplos[p][q].getListOrder()] = q; } } // set number formatter to pad with appropriate number of zeroes NumberFormat nfMarker = NumberFormat.getInstance(); int markerCount = Chromosome.getSize(); // the +0.0000001 is because there is // some suckage where log(1000) / log(10) isn't actually 3 int markerDigits = (int) (0.0000001 + Math.log(markerCount) / Math.log(10)) + 1; nfMarker.setMinimumIntegerDigits(markerDigits); nfMarker.setMaximumIntegerDigits(markerDigits); //int tagShapeX[] = new int[3]; //int tagShapeY[] = new int[3]; //Polygon tagShape; int textRight = 0; // gets updated for scooting over // i = 0 to number of columns - 1 for (int i = 0; i < filteredHaplos.length; i++) { int[] markerNums = filteredHaplos[i][0].getMarkers(); boolean[] tags = filteredHaplos[i][0].getTags(); //int headerX = x; for (int z = 0; z < markerNums.length; z++) { //int tagMiddle = tagMetrics.getAscent() / 2; //int tagLeft = x + z*letterWidth + tagMiddle; //g.translate(tagLeft, 20); // if tag snp, draw little triangle pooper if (tags[z]) { g.drawImage(tagImage, left + z*CHAR_WIDTH, top + markerDigits*MARKER_CHAR_WIDTH + -(CHAR_HEIGHT - TAG_SPAN), null); } //g.rotate(-Math.PI / 2.0); //g.drawLine(0, 0, 0, 0); //g.setColor(Color.black); //g.drawString(nfMarker.format(markerNums[z]), 0, tagMiddle); char markerChars[] = nfMarker.format(Chromosome.realIndex[markerNums[z]]+1).toCharArray(); for (int m = 0; m < markerDigits; m++) { g.drawImage(markerNumImages[markerChars[m] - '0'], left + z*CHAR_WIDTH + (1 + CHAR_WIDTH - MARKER_CHAR_HEIGHT)/2, top + (markerDigits-m-1)*MARKER_CHAR_WIDTH, null); } // undo the transform.. no push/pop.. arrgh //g.rotate(Math.PI / 2.0); //g.translate(-tagLeft, -20); } // y position of the first image for the haplotype letter // top + the size of the marker digits + the size of the tag + // the character height centered in the row's height int above = top + markerDigits*MARKER_CHAR_WIDTH + TAG_SPAN + (ROW_HEIGHT - CHAR_HEIGHT) / 2; for (int j = 0; j < filteredHaplos[i].length; j++){ int curHapNum = lookupPos[i][j]; //String theHap = new String(); //String thePercentage = new String(); int[] theGeno = filteredHaplos[i][curHapNum].getGeno(); //getGeno(); // j is the row of haplotype for (int k = 0; k < theGeno.length; k++) { // theGeno[k] will be 1,2,3,4 (acgt) or 8 (for bad) g.drawImage(charImages[theGeno[k] - 1], left + k*CHAR_WIDTH, above + j*ROW_HEIGHT, null); } //draw the percentage value in non mono font double percent = filteredHaplos[i][curHapNum].getPercentage(); //thePercentage = " " + nf.format(percent); char percentChars[] = nf.format(percent).toCharArray(); // perhaps need an exceptional case for 1.0 being the percent for (int m = 0; m < percentChars.length; m++) { g.drawImage(grayNumImages[(m == 0) ? 10 : percentChars[m]-'0'], left + theGeno.length*CHAR_WIDTH + m*CHAR_WIDTH, above + j*ROW_HEIGHT, null); } // 4 is the number of chars in .999 for the percent textRight = left + theGeno.length*CHAR_WIDTH + 4*CHAR_WIDTH; g.setColor(Color.black); if (i < filteredHaplos.length - 1) { //draw crossovers for (int crossCount = 0; crossCount < filteredHaplos[i+1].length; crossCount++) { double crossVal = filteredHaplos[i][curHapNum].getCrossover(crossCount); //draw thin and thick lines int crossValue = (int) (crossVal*100); if (crossValue > thinThresh) { g.setStroke(crossValue > thickThresh ? thickStroke : thinStroke); int connectTo = filteredHaplos[i+1][crossCount].getListOrder(); g.drawLine(textRight + LINE_LEFT, above + j*ROW_HEIGHT + ROW_HEIGHT/2, textRight + LINE_RIGHT, above + connectTo*ROW_HEIGHT + ROW_HEIGHT/2); } } } } left = textRight; // add the multilocus d prime if appropriate if (i < filteredHaplos.length - 1) { //put the numbers in the right place vertically int depth; if (filteredHaplos[i].length > filteredHaplos[i+1].length){ depth = filteredHaplos[i].length; }else{ depth = filteredHaplos[i+1].length; } char multiChars[] = nfMulti.format(multidprimeArray[i]).toCharArray(); for (int m = 0; m < 3; m++) { // 7*CHAR_WIDTH/2 = CHAR_WIDTH*3.5 to center it better // since the . char is right-aligned, and visually off g.drawImage(blackNumImages[(m == 0) ? 10 : multiChars[m]-'0'], left + (LINE_SPAN - 7*CHAR_WIDTH/2)/2 + m*CHAR_WIDTH, above + (depth * ROW_HEIGHT), null); } //int multiX = x + totalWidth + 3; //g.drawString(nfMulti.format(multidprimeArray[i]), // multiX+2, windowY - 3); } left += LINE_SPAN; //x += (totalWidth + 40); //y = verticalOffset; //left = textRight + LINE_SPAN; } } |
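The added branch above draws the "1" glyph directly whenever multidprimeArray[i] exceeds 0.99, presumably because the formatter is configured with zero integer digits, which truncates the leading 1 of 1.00 (the row's own comment notes "perhaps need an exceptional case for 1.0"). A small demonstration of that truncation, with the locale pinned to US so the decimal separator is a point; the class name is illustrative:

import java.text.NumberFormat;
import java.util.Locale;

public class MaxIntDigits {
    public static void main(String[] args) {
        NumberFormat nf = NumberFormat.getInstance(Locale.US);
        nf.setMinimumFractionDigits(2);
        nf.setMaximumFractionDigits(2);
        nf.setMinimumIntegerDigits(0);
        nf.setMaximumIntegerDigits(0);
        System.out.println(nf.format(0.87)); // .87
        System.out.println(nf.format(1.0));  // .00 -- the leading 1 is dropped
    }
}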