Columns: rem (string, lengths 0 to 477k), add (string, lengths 0 to 313k), context (string, lengths 6 to 599k)
logger.debug("reading UserSettings from java.util.prefs.");
logger.debug("loading UserSettings from java.util.prefs.");
public CoreUserSettings read(ArchitectSession session) throws IOException { logger.debug("reading UserSettings from java.util.prefs."); if ( prefs == null ) { prefs = ArchitectFrame.getMainInstance().getPrefs(); } logger.debug("Preferences class = " + prefs.getClass()); CoreUserSettings userSettings = new CoreUserSettings(); int i; for (i = 0; i <= 99; i++) { String jarName = prefs.get(jarFilePrefName(i), null); logger.debug("read Jar File entry: " + jarName); if (jarName == null) { break; } logger.debug("Adding JarName: " + jarName); session.addDriverJar(jarName); } // XXX Put prefs in sub-node, just delete it before you start. for (; i <= 99; i++) { prefs.remove(jarFilePrefName(i)); } userSettings.setPlDotIniPath(prefs.get("PL.INI.PATH", null)); UserSettings swingUserSettings = userSettings.getSwingSettings(); swingUserSettings.setBoolean(SwingUserSettings.PLAYPEN_RENDER_ANTIALIASED, prefs.getBoolean(SwingUserSettings.PLAYPEN_RENDER_ANTIALIASED, false)); ETLUserSettings etlUserSettings = userSettings.getETLUserSettings(); etlUserSettings.setString(ETLUserSettings.PROP_PL_ENGINE_PATH, prefs.get(ETLUserSettings.PROP_PL_ENGINE_PATH, "")); etlUserSettings.setString(ETLUserSettings.PROP_ETL_LOG_PATH, prefs.get(ETLUserSettings.PROP_ETL_LOG_PATH, defaultHomeFile("etl.log"))); DDLUserSettings ddlUserSettings = userSettings.getDDLUserSettings(); ddlUserSettings.setString(DDLUserSettings.PROP_DDL_LOG_PATH,prefs.get(DDLUserSettings.PROP_DDL_LOG_PATH, defaultHomeFile("ddl.log"))); QFAUserSettings qfaUserSettings = userSettings.getQfaUserSettings(); qfaUserSettings.setBoolean(QFAUserSettings.EXCEPTION_REPORTING,prefs.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)); PrintUserSettings printUserSettings = userSettings.getPrintUserSettings(); printUserSettings.setDefaultPrinterName( prefs.get(PrintUserSettings.DEFAULT_PRINTER_NAME, "")); return userSettings; }
prefs.remove(jarFilePrefName(i));
if (prefs.get(jarFilePrefName(i), null) != null) { prefs.remove(jarFilePrefName(i)); }
public CoreUserSettings read(ArchitectSession session) throws IOException { logger.debug("reading UserSettings from java.util.prefs."); if ( prefs == null ) { prefs = ArchitectFrame.getMainInstance().getPrefs(); } logger.debug("Preferences class = " + prefs.getClass()); CoreUserSettings userSettings = new CoreUserSettings(); int i; for (i = 0; i <= 99; i++) { String jarName = prefs.get(jarFilePrefName(i), null); logger.debug("read Jar File entry: " + jarName); if (jarName == null) { break; } logger.debug("Adding JarName: " + jarName); session.addDriverJar(jarName); } // XXX Put prefs in sub-node, just delete it before you start. for (; i <= 99; i++) { prefs.remove(jarFilePrefName(i)); } userSettings.setPlDotIniPath(prefs.get("PL.INI.PATH", null)); UserSettings swingUserSettings = userSettings.getSwingSettings(); swingUserSettings.setBoolean(SwingUserSettings.PLAYPEN_RENDER_ANTIALIASED, prefs.getBoolean(SwingUserSettings.PLAYPEN_RENDER_ANTIALIASED, false)); ETLUserSettings etlUserSettings = userSettings.getETLUserSettings(); etlUserSettings.setString(ETLUserSettings.PROP_PL_ENGINE_PATH, prefs.get(ETLUserSettings.PROP_PL_ENGINE_PATH, "")); etlUserSettings.setString(ETLUserSettings.PROP_ETL_LOG_PATH, prefs.get(ETLUserSettings.PROP_ETL_LOG_PATH, defaultHomeFile("etl.log"))); DDLUserSettings ddlUserSettings = userSettings.getDDLUserSettings(); ddlUserSettings.setString(DDLUserSettings.PROP_DDL_LOG_PATH,prefs.get(DDLUserSettings.PROP_DDL_LOG_PATH, defaultHomeFile("ddl.log"))); QFAUserSettings qfaUserSettings = userSettings.getQfaUserSettings(); qfaUserSettings.setBoolean(QFAUserSettings.EXCEPTION_REPORTING,prefs.getBoolean(QFAUserSettings.EXCEPTION_REPORTING,true)); PrintUserSettings printUserSettings = userSettings.getPrintUserSettings(); printUserSettings.setDefaultPrinterName( prefs.get(PrintUserSettings.DEFAULT_PRINTER_NAME, "")); return userSettings; }
config.setDashboards(getDashbardConfigList(application));
private ApplicationConfig getApplicationConfig(Element application){ List params = application.getChildren(PARAMETER); Iterator paramIterator = params.iterator(); Map<String, String> paramValues = new HashMap<String, String>(1); while(paramIterator.hasNext()){ Element param = (Element)paramIterator.next(); paramValues.put(param.getChildTextTrim(PARAMETER_NAME), param.getChildTextTrim(PARAMETER_VALUE)); } final String encryptedPassword = application.getAttributeValue(PASSWORD); String password = null; if(encryptedPassword != null){ password = Crypto.decrypt(encryptedPassword); } Integer port = null; if(application.getAttributeValue(PORT) != null){ port = new Integer(application.getAttributeValue(PORT)); } ApplicationConfig config = ApplicationConfigFactory.create( application.getAttributeValue(APPLICATION_ID), application.getAttributeValue(APPLICATION_NAME), application.getAttributeValue(APPLICATION_TYPE), application.getAttributeValue(HOST), port, application.getAttributeValue(URL), application.getAttributeValue(USERNAME), password, paramValues); config.setMBeans(getMBeanConfigList(application)); config.setAlerts(getAlertsList(application, config)); config.setGraphs(getGraphConfigList(application, config)); return config; }
new DashboardConfig(dashboard.getAttributeValue(DASHBOARD_ID), dashboard.getAttributeValue(DASHBOARD_NAME));
new DashboardConfig(dashboard.getAttributeValue(DASHBOARD_ID));
private List<DashboardConfig> getDashboardConfigList(List dashboards){ final List<DashboardConfig> dashboardConfigList = new LinkedList<DashboardConfig>(); for(Iterator it = dashboards.iterator(); it.hasNext();){ Element dashboard = (Element)it.next(); DashboardConfig dashboardConfig = new DashboardConfig(dashboard.getAttributeValue(DASHBOARD_ID), dashboard.getAttributeValue(DASHBOARD_NAME)); dashboardConfigList.add(dashboardConfig); } return dashboardConfigList; }
treeModel.nodeChanged(node );
void addPhotoToTree( PhotoInfo photo ) { Collection folders = photo.getFolders(); Iterator iter = folders.iterator(); while ( iter.hasNext() ) { PhotoFolder folder = (PhotoFolder) iter.next(); DefaultMutableTreeNode node = addFolder( folder ); FolderNode fn = (FolderNode) node.getUserObject(); fn.addPhoto( photo ); } }
int idx = parentNode.getIndex( treeNode ); parentNode.remove( treeNode ); int[] idxs = new int[1]; idxs[0] = idx; Object[] nodes = new Object[1]; nodes[0] = treeNode; treeModel.nodesWereRemoved( parentNode, idxs, nodes );
if ( treeNode.getChildCount() == 0 ) { int idx = parentNode.getIndex( treeNode ); parentNode.remove( treeNode ); int[] idxs = new int[1]; idxs[0] = idx; Object[] nodes = new Object[1]; nodes[0] = treeNode; treeModel.nodesWereRemoved( parentNode, idxs, nodes ); } else { treeModel.nodeChanged(treeNode); }
public void removeAllFromFolder( PhotoFolder f ) { addedToFolders.remove( f ); removedFromFolders.add( f ); // Remove the folder from the tree DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode) folderNodes.get( f ); DefaultMutableTreeNode parentNode = (DefaultMutableTreeNode) treeNode.getParent(); int idx = parentNode.getIndex( treeNode ); parentNode.remove( treeNode ); int[] idxs = new int[1]; idxs[0] = idx; Object[] nodes = new Object[1]; nodes[0] = treeNode; treeModel.nodesWereRemoved( parentNode, idxs, nodes ); }
public TablePane(SQLTable m) {
public TablePane() {
public TablePane(SQLTable m) { setOpaque(true); setModel(m); setMinimumSize(new Dimension(100,200)); setPreferredSize(new Dimension(100,200)); dt = new DropTarget(this, new TablePaneDropListener()); dgl = new TablePaneDragGestureListener(); ds = new DragSource(); dgr = getToolkit().createDragGestureRecognizer(MouseDragGestureRecognizer.class, ds, this, DnDConstants.ACTION_MOVE, dgl); setInsertionPoint(COLUMN_INDEX_NONE); addMouseListener(new PopupListener()); updateUI(); }
setModel(m);
public TablePane(SQLTable m) { setOpaque(true); setModel(m); setMinimumSize(new Dimension(100,200)); setPreferredSize(new Dimension(100,200)); dt = new DropTarget(this, new TablePaneDropListener()); dgl = new TablePaneDragGestureListener(); ds = new DragSource(); dgr = getToolkit().createDragGestureRecognizer(MouseDragGestureRecognizer.class, ds, this, DnDConstants.ACTION_MOVE, dgl); setInsertionPoint(COLUMN_INDEX_NONE); addMouseListener(new PopupListener()); updateUI(); }
if(password != null && !password.equals(config.getPassword())){
if(!password.equals(ApplicationForm.FORM_PASSWORD)){
public ActionForward execute(WebContext context, ActionMapping mapping, ActionForm actionForm, HttpServletRequest request, HttpServletResponse response) throws Exception { AccessController.checkAccess(context.getServiceContext(), ACL_EDIT_APPLICATIONS); ApplicationForm appForm = (ApplicationForm)actionForm; ApplicationConfig config = ApplicationConfigManager.getApplicationConfig( appForm.getApplicationId()); assert config != null; config.setName(appForm.getName()); config.setHost(appForm.getHost()); if(appForm.getPort() != null) config.setPort(new Integer(appForm.getPort())); config.setURL(appForm.getURL()); config.setUsername(appForm.getUsername()); final String password = appForm.getPassword(); if(password != null && !password.equals(config.getPassword())){ config.setPassword(password); } ApplicationConfigManager.updateApplication(config); UserActivityLogger.getInstance().logActivity( context.getUser().getUsername(), "Updated application "+"\""+config.getName()+"\""); return mapping.findForward(Forwards.SUCCESS); }
Vector indList = getOrder();
Vector indList = (Vector)order.clone();
public Vector check() throws PedFileException{ //before we perform the check we want to prune out individuals with too much missing data //or trios which contain individuals with too much missing data Vector indList = getOrder(); Individual currentInd; Family currentFamily; //deal with individuals who are missing too much data for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); double numMissing = 0; int numMarkers = currentInd.getNumMarkers(); for (int i = 0; i < numMarkers; i++){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == 0 || thisMarker[1] == 0){ numMissing++; } } if (numMissing/numMarkers > Options.getMissingThreshold()){ //this person is missing too much data so remove him and then deal //with his family connections order.removeElement(currentInd); axedPeople.add(currentInd.getIndividualID()); if (currentFamily.getNumMembers() > 1){ //there are more people in this family so deal with relatives appropriately if (currentInd.hasEitherParent()){ //I have parents, so kick out any of my kids. Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //I have no parents but need to check if my spouse does String spouseID = ""; Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID())) spouseID = nextMember.getMomID(); if (nextMember.getMomID().equals(currentInd.getIndividualID())) spouseID = nextMember.getDadID(); } if (!spouseID.equals("")){ if (currentFamily.getMember(spouseID).hasEitherParent()){ //remove my kids and leave my spouse alone peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //knock off my spouse and make my first kid a founder (i.e. 
"0" for parents) //and remove any other kids order.removeElement(currentFamily.getMember(spouseID)); currentFamily.removeMember(spouseID); peopleinFam = currentFamily.getMemberList(); boolean oneFound = false; while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ if (oneFound){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); }else{ nextMember.setDadID("0"); nextMember.setMomID("0"); oneFound = true; } } } } } } } currentFamily.removeMember(currentInd.getIndividualID()); if (currentFamily.getNumMembers() == 0){ //if everyone in a family is gone, we remove it from the list families.remove(currentInd.getFamilyID()); axedFamilies.add(currentInd.getFamilyID()); } } } indList = getOrder(); for (int x = 0; x < indList.size(); x++){ //after we've done all that go through and set the boolean for each person who has any kids currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getMomID().equals(currentInd.getIndividualID()) || nextMember.getDadID().equals(currentInd.getIndividualID())){ currentInd.setHasKids(true); break; } } } CheckData cd = new CheckData(this); Vector results = cd.check(); /*int size = results.size(); for (int i = 0; i < size; i++) { MarkerResult markerResult = (MarkerResult) results.elementAt(i); System.out.println(markerResult.toString()); }*/ this.results = results; return results; }
axedPeople.add(currentInd.getIndividualID());
axedPeople.add(currentInd);
public Vector check() throws PedFileException{ //before we perform the check we want to prune out individuals with too much missing data //or trios which contain individuals with too much missing data Vector indList = getOrder(); Individual currentInd; Family currentFamily; //deal with individuals who are missing too much data for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); double numMissing = 0; int numMarkers = currentInd.getNumMarkers(); for (int i = 0; i < numMarkers; i++){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == 0 || thisMarker[1] == 0){ numMissing++; } } if (numMissing/numMarkers > Options.getMissingThreshold()){ //this person is missing too much data so remove him and then deal //with his family connections order.removeElement(currentInd); axedPeople.add(currentInd.getIndividualID()); if (currentFamily.getNumMembers() > 1){ //there are more people in this family so deal with relatives appropriately if (currentInd.hasEitherParent()){ //I have parents, so kick out any of my kids. Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //I have no parents but need to check if my spouse does String spouseID = ""; Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID())) spouseID = nextMember.getMomID(); if (nextMember.getMomID().equals(currentInd.getIndividualID())) spouseID = nextMember.getDadID(); } if (!spouseID.equals("")){ if (currentFamily.getMember(spouseID).hasEitherParent()){ //remove my kids and leave my spouse alone peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //knock off my spouse and make my first kid a founder (i.e. 
"0" for parents) //and remove any other kids order.removeElement(currentFamily.getMember(spouseID)); currentFamily.removeMember(spouseID); peopleinFam = currentFamily.getMemberList(); boolean oneFound = false; while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ if (oneFound){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); }else{ nextMember.setDadID("0"); nextMember.setMomID("0"); oneFound = true; } } } } } } } currentFamily.removeMember(currentInd.getIndividualID()); if (currentFamily.getNumMembers() == 0){ //if everyone in a family is gone, we remove it from the list families.remove(currentInd.getFamilyID()); axedFamilies.add(currentInd.getFamilyID()); } } } indList = getOrder(); for (int x = 0; x < indList.size(); x++){ //after we've done all that go through and set the boolean for each person who has any kids currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getMomID().equals(currentInd.getIndividualID()) || nextMember.getDadID().equals(currentInd.getIndividualID())){ currentInd.setHasKids(true); break; } } } CheckData cd = new CheckData(this); Vector results = cd.check(); /*int size = results.size(); for (int i = 0; i < size; i++) { MarkerResult markerResult = (MarkerResult) results.elementAt(i); System.out.println(markerResult.toString()); }*/ this.results = results; return results; }
axedPeople.add(nextMember);
public Vector check() throws PedFileException{ //before we perform the check we want to prune out individuals with too much missing data //or trios which contain individuals with too much missing data Vector indList = getOrder(); Individual currentInd; Family currentFamily; //deal with individuals who are missing too much data for(int x=0; x < indList.size(); x++){ currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); double numMissing = 0; int numMarkers = currentInd.getNumMarkers(); for (int i = 0; i < numMarkers; i++){ byte[] thisMarker = currentInd.getMarker(i); if (thisMarker[0] == 0 || thisMarker[1] == 0){ numMissing++; } } if (numMissing/numMarkers > Options.getMissingThreshold()){ //this person is missing too much data so remove him and then deal //with his family connections order.removeElement(currentInd); axedPeople.add(currentInd.getIndividualID()); if (currentFamily.getNumMembers() > 1){ //there are more people in this family so deal with relatives appropriately if (currentInd.hasEitherParent()){ //I have parents, so kick out any of my kids. Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //I have no parents but need to check if my spouse does String spouseID = ""; Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID())) spouseID = nextMember.getMomID(); if (nextMember.getMomID().equals(currentInd.getIndividualID())) spouseID = nextMember.getDadID(); } if (!spouseID.equals("")){ if (currentFamily.getMember(spouseID).hasEitherParent()){ //remove my kids and leave my spouse alone peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); } } }else{ //knock off my spouse and make my first kid a founder (i.e. 
"0" for parents) //and remove any other kids order.removeElement(currentFamily.getMember(spouseID)); currentFamily.removeMember(spouseID); peopleinFam = currentFamily.getMemberList(); boolean oneFound = false; while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getDadID().equals(currentInd.getIndividualID()) || nextMember.getMomID().equals(currentInd.getIndividualID())){ if (oneFound){ order.removeElement(nextMember); currentFamily.removeMember(nextMember.getIndividualID()); }else{ nextMember.setDadID("0"); nextMember.setMomID("0"); oneFound = true; } } } } } } } currentFamily.removeMember(currentInd.getIndividualID()); if (currentFamily.getNumMembers() == 0){ //if everyone in a family is gone, we remove it from the list families.remove(currentInd.getFamilyID()); axedFamilies.add(currentInd.getFamilyID()); } } } indList = getOrder(); for (int x = 0; x < indList.size(); x++){ //after we've done all that go through and set the boolean for each person who has any kids currentInd = (Individual)indList.elementAt(x); currentFamily = getFamily(currentInd.getFamilyID()); Enumeration peopleinFam = currentFamily.getMemberList(); while (peopleinFam.hasMoreElements()){ Individual nextMember = currentFamily.getMember((String)peopleinFam.nextElement()); if (nextMember.getMomID().equals(currentInd.getIndividualID()) || nextMember.getDadID().equals(currentInd.getIndividualID())){ currentInd.setHasKids(true); break; } } } CheckData cd = new CheckData(this); Vector results = cd.check(); /*int size = results.size(); for (int i = 0; i < size; i++) { MarkerResult markerResult = (MarkerResult) results.elementAt(i); System.out.println(markerResult.toString()); }*/ this.results = results; return results; }
ExportThread( ExportSelectedAction owner, Collection selection, String fnameFormat, int width, int height ) { this.selection = selection;
ExportThread( ExportSelectedAction owner, PhotoInfo[] exportPhotos, String fnameFormat, int width, int height ) { this.exportPhotos = exportPhotos;
ExportThread( ExportSelectedAction owner, Collection selection, String fnameFormat, int width, int height ) { this.selection = selection; this.format = fnameFormat; this.exportWidth = width; this.exportHeight = height; this.owner = owner; }
Iterator iter = selection.iterator(); int n = 1; while ( iter.hasNext() ) { String fname = String.format( format, new Integer( n ) ); int percent = (n-1) * 100 / selection.size();
for ( int n = 0; n < exportPhotos.length; n++ ) { String fname = String.format( format, new Integer( n+1 ) ); int percent = (n) * 100 / exportPhotos.length;
public void run() { Iterator iter = selection.iterator(); int n = 1; while ( iter.hasNext() ) { String fname = String.format( format, new Integer( n ) ); int percent = (n-1) * 100 / selection.size(); owner.exportingPhoto( this, fname, percent ); File f = new File( fname ); PhotoInfo photo = (PhotoInfo) iter.next(); photo.exportPhoto( f, exportWidth, exportHeight ); n++; } owner.exportDone( this ); }
PhotoInfo photo = (PhotoInfo) iter.next();
PhotoInfo photo = exportPhotos[n];
public void run() { Iterator iter = selection.iterator(); int n = 1; while ( iter.hasNext() ) { String fname = String.format( format, new Integer( n ) ); int percent = (n-1) * 100 / selection.size(); owner.exportingPhoto( this, fname, percent ); File f = new File( fname ); PhotoInfo photo = (PhotoInfo) iter.next(); photo.exportPhoto( f, exportWidth, exportHeight ); n++; } owner.exportDone( this ); }
n++;
public void run() { Iterator iter = selection.iterator(); int n = 1; while ( iter.hasNext() ) { String fname = String.format( format, new Integer( n ) ); int percent = (n-1) * 100 / selection.size(); owner.exportingPhoto( this, fname, percent ); File f = new File( fname ); PhotoInfo photo = (PhotoInfo) iter.next(); photo.exportPhoto( f, exportWidth, exportHeight ); n++; } owner.exportDone( this ); }
TreeSet sortedSelection = new TreeSet( comp ); sortedSelection.addAll( selection ); selection = sortedSelection;
Arrays.sort( exportPhotos, comp );
public void actionPerformed( ActionEvent ev ) { File exportFile = null; if ( view.getSelectedCount() > 1 ) { exportFile = new File( "image_$n.jpg" ); } else { exportFile = new File( "image.jpg" ); } ExportDlg dlg = new ExportDlg( null, true ); dlg.setFilename( exportFile.getAbsolutePath() ); int retval = dlg.showDialog(); if ( retval == ExportDlg.EXPORT_OPTION ) { Container c = view.getTopLevelAncestor(); Cursor oldCursor = c.getCursor(); c.setCursor( new Cursor( Cursor.WAIT_CURSOR ) ); String exportFileTmpl = dlg.getFilename(); int exportWidth = dlg.getImgWidth(); int exportHeight = dlg.getImgHeight(); Collection selection = view.getSelection(); if ( selection != null ) { if ( selection.size() > 1 ) { // Ensure that the numbering order is the same is in current view Comparator comp = view.getPhotoOrderComparator(); if ( comp != null ) { TreeSet sortedSelection = new TreeSet( comp ); sortedSelection.addAll( selection ); selection = sortedSelection; } String format = getSequenceFnameFormat( exportFileTmpl ); BrowserWindow w = null; if ( c instanceof BrowserWindow ) { w = (BrowserWindow) c; } ExportThread exporter = new ExportThread( this, selection, format, exportWidth, exportHeight ); Thread t = new Thread( exporter ); setEnabled( false ); t.start(); } else { Iterator iter = selection.iterator(); if ( iter.hasNext() ) { PhotoInfo photo = (PhotoInfo) iter.next(); photo.exportPhoto( new File( exportFileTmpl ), exportWidth, exportHeight ); } } } c.setCursor( oldCursor ); } }
if ( c instanceof BrowserWindow ) { w = (BrowserWindow) c; } ExportThread exporter = new ExportThread( this, selection, format, exportWidth, exportHeight );
ExportThread exporter = new ExportThread( this, exportPhotos, format, exportWidth, exportHeight );
public void actionPerformed( ActionEvent ev ) { File exportFile = null; if ( view.getSelectedCount() > 1 ) { exportFile = new File( "image_$n.jpg" ); } else { exportFile = new File( "image.jpg" ); } ExportDlg dlg = new ExportDlg( null, true ); dlg.setFilename( exportFile.getAbsolutePath() ); int retval = dlg.showDialog(); if ( retval == ExportDlg.EXPORT_OPTION ) { Container c = view.getTopLevelAncestor(); Cursor oldCursor = c.getCursor(); c.setCursor( new Cursor( Cursor.WAIT_CURSOR ) ); String exportFileTmpl = dlg.getFilename(); int exportWidth = dlg.getImgWidth(); int exportHeight = dlg.getImgHeight(); Collection selection = view.getSelection(); if ( selection != null ) { if ( selection.size() > 1 ) { // Ensure that the numbering order is the same is in current view Comparator comp = view.getPhotoOrderComparator(); if ( comp != null ) { TreeSet sortedSelection = new TreeSet( comp ); sortedSelection.addAll( selection ); selection = sortedSelection; } String format = getSequenceFnameFormat( exportFileTmpl ); BrowserWindow w = null; if ( c instanceof BrowserWindow ) { w = (BrowserWindow) c; } ExportThread exporter = new ExportThread( this, selection, format, exportWidth, exportHeight ); Thread t = new Thread( exporter ); setEnabled( false ); t.start(); } else { Iterator iter = selection.iterator(); if ( iter.hasNext() ) { PhotoInfo photo = (PhotoInfo) iter.next(); photo.exportPhoto( new File( exportFileTmpl ), exportWidth, exportHeight ); } } } c.setCursor( oldCursor ); } }
registerTag("assertThrown", AssertThrownTag.class);
public JUnitTagLibrary() { registerTag("assert", AssertTag.class); registerTag("assertEquals", AssertEqualsTag.class); registerTag("fail", FailTag.class); registerTag("run", RunTag.class ); registerTag("case", CaseTag.class ); registerTag("suite", SuiteTag.class ); }
if (select == null) { throw new MissingAttributeException("select");
Object node = xpathContext; if (select != null) { node = select.selectSingleNode(xpathContext);
public void doTag(XMLOutput output) throws Exception { Object xpathContext = getXPathContext(); if (select == null) { throw new MissingAttributeException("select"); } SAXWriter saxWriter = new SAXWriter(output, output); Object node = select.selectSingleNode(xpathContext); if (node instanceof Element) { Element element = (Element) node; throw new JellyException( "Not implemented yet!" );/** ### need to add these methods to dom4j saxWriter.writeOpen(element); saxWriter.writeClose(element);*/ } else if (node instanceof Node) { saxWriter.write((Node) node); } else if (node != null) { output.write(node.toString()); } }
SAXWriter saxWriter = new SAXWriter(output, output); Object node = select.selectSingleNode(xpathContext); if (node instanceof Element) { Element element = (Element) node; throw new JellyException( "Not implemented yet!" ); /** ### need to add these methods to dom4j
if ( node instanceof Element ) { Element element = (Element) node; SAXWriter saxWriter = new SAXWriter(output, output);
public void doTag(XMLOutput output) throws Exception { Object xpathContext = getXPathContext(); if (select == null) { throw new MissingAttributeException("select"); } SAXWriter saxWriter = new SAXWriter(output, output); Object node = select.selectSingleNode(xpathContext); if (node instanceof Element) { Element element = (Element) node; throw new JellyException( "Not implemented yet!" );/** ### need to add these methods to dom4j saxWriter.writeOpen(element); saxWriter.writeClose(element);*/ } else if (node instanceof Node) { saxWriter.write((Node) node); } else if (node != null) { output.write(node.toString()); } }
*/ } else if (node instanceof Node) { saxWriter.write((Node) node); } else if (node != null) { output.write(node.toString()); }
} else { invokeBody(output); }
public void doTag(XMLOutput output) throws Exception { Object xpathContext = getXPathContext(); if (select == null) { throw new MissingAttributeException("select"); } SAXWriter saxWriter = new SAXWriter(output, output); Object node = select.selectSingleNode(xpathContext); if (node instanceof Element) { Element element = (Element) node; throw new JellyException( "Not implemented yet!" );/** ### need to add these methods to dom4j saxWriter.writeOpen(element); saxWriter.writeClose(element);*/ } else if (node instanceof Node) { saxWriter.write((Node) node); } else if (node != null) { output.write(node.toString()); } }
Writer writer = ( args.length > 1 )
final Writer writer = ( args.length > 1 )
public static void main(String[] args) throws Exception { try { if (args.length <= 0) { System.out.println("Usage: Jelly scriptFile [outputFile]"); return; } Jelly jelly = new Jelly(); jelly.setScript(args[0]); // later we might wanna add some command line arguments // checking stuff using commons-cli to specify the output file // and input file via command line arguments Writer writer = ( args.length > 1 ) ? new FileWriter( args[1] ) : new OutputStreamWriter( System.out ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput(writer); // add the system properties and the command line arguments JellyContext context = jelly.getJellyContext(); context.setVariable("args", args); script.run(context, output); writer.close(); } catch (JellyException e) { Throwable cause = e.getCause(); if ( cause != null ) { cause.printStackTrace(); } else { e.printStackTrace(); } } }
writer.close();
Runtime.getRuntime().addShutdownHook( new Thread() { public void run() { try { writer.close(); } catch (Exception e) { } } } );
public static void main(String[] args) throws Exception { try { if (args.length <= 0) { System.out.println("Usage: Jelly scriptFile [outputFile]"); return; } Jelly jelly = new Jelly(); jelly.setScript(args[0]); // later we might wanna add some command line arguments // checking stuff using commons-cli to specify the output file // and input file via command line arguments Writer writer = ( args.length > 1 ) ? new FileWriter( args[1] ) : new OutputStreamWriter( System.out ); Script script = jelly.compileScript(); XMLOutput output = XMLOutput.createXMLOutput(writer); // add the system properties and the command line arguments JellyContext context = jelly.getJellyContext(); context.setVariable("args", args); script.run(context, output); writer.close(); } catch (JellyException e) { Throwable cause = e.getCause(); if ( cause != null ) { cause.printStackTrace(); } else { e.printStackTrace(); } } }
invokeBody( output);
public void doTag(XMLOutput output) throws Exception { String localName = null; int idx = name.indexOf(':'); if (idx >= 0) { localName = name.substring(idx + 1); } else { localName = name; } output.startElement(namespace, localName, name, attributes); invokeBody( output); output.endElement(namespace, localName, name); }
attributes.clear();
public void doTag(XMLOutput output) throws Exception { String localName = null; int idx = name.indexOf(':'); if (idx >= 0) { localName = name.substring(idx + 1); } else { localName = name; } output.startElement(namespace, localName, name, attributes); invokeBody( output); output.endElement(namespace, localName, name); }
attributes.addAttribute("", name, name, "CDATA", value.toString());
attributes.addAttribute("", name, name, "CDATA", value);
public void setAttributeValue( String name, String value ) { // ### we'll assume that all attributes are in no namespace! // ### this is severely limiting! // ### we should be namespace aware int index = attributes.getIndex("", name); if (index >= 0) { attributes.removeAttribute(index); } // treat null values as no attribute if (value != null) { attributes.addAttribute("", name, name, "CDATA", value.toString()); } }
"Could not delete the column " + sc.getName() + " because it is part of a relationship key. Continue" + " deleting of other selected columns?",
"Could not delete the column " + sc.getName() + " because it is part of\n" + "the relationship \""+ex.getLockingRelationship()+"\".\n\n" + "Continue deleting remaining selected columns?",
public void actionPerformed(ActionEvent evt) { logger.debug("delete action detected!"); logger.debug("ACTION COMMAND: " + evt.getActionCommand()); if (evt.getActionCommand().equals(ArchitectSwingConstants.ACTION_COMMAND_SRC_PLAYPEN)) { logger.debug("delete action came from playpen"); List items = pp.getSelectedItems(); if (items.size() < 1) { JOptionPane.showMessageDialog(pp, "No items to delete!"); } if (items.size() > 1) { // count how many relationships and tables there are int tCount = pp.getSelectedTables().size(); int rCount = pp.getSelectedRelationShips().size(); int decision = JOptionPane.showConfirmDialog(pp, "Are you sure you want to delete these " +tCount+" tables and "+rCount+" relationships?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } else { // single selection, so we might be deleting columns boolean deletingColumns = false; Selectable item = (Selectable) items.get(0); if (item instanceof TablePane) { // make a list of columns to delete TablePane tp = (TablePane) item; ArrayList selectedColumns = new ArrayList(); try { for (int i=0; i < tp.getModel().getColumns().size(); i++) { if (tp.isColumnSelected(i)) { deletingColumns = true; // don't fall through into Table/Relationship delete logic selectedColumns.add(tp.getModel().getColumn(i)); } } } catch (ArchitectException ae) { JOptionPane.showMessageDialog(pp, ae.getMessage()); return; } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_START,"Starting multi-select")); // now, delete the columns Iterator it2 = selectedColumns.iterator(); while (it2.hasNext()) { SQLColumn sc = (SQLColumn) it2.next(); try { tp.getModel().removeColumn(sc); } catch (LockedColumnException ex) { int decision = JOptionPane.showConfirmDialog(pp, "Could not delete the column " + sc.getName() + " because it is part of a relationship key. 
Continue" + " deleting of other selected columns?", "Column is Locked", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } catch (ArchitectException e) { logger.error("Unexpected exception encountered when attempting to delete column '"+ sc+"' of table '"+sc.getParentTable()+"'"); ASUtils.showExceptionDialog(pp, "Encountered a Problem Deleting the column", e); } } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_END,"Ending multi-select")); } if (deletingColumns) { // we tried to delete 1 or more columns, so don't try to delete the table return; } } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_START,"Starting multi-select")); // items.size() > 0, user has OK'ed the delete Iterator it = items.iterator(); while (it.hasNext()) { Selectable item = (Selectable) it.next(); logger.debug("next item for delete is: " + item.getClass().getName()); if (item instanceof TablePane) { TablePane tp = (TablePane) item; tp.setSelected(false); pp.db.removeChild(tp.getModel()); if (logger.isDebugEnabled()) { logger.debug("removing element from tableNames set: " + tp.getModel().getName()); logger.debug("before delete: " + pp.tableNames.toArray()); } pp.tableNames.remove(tp.getModel().getName().toLowerCase()); if (logger.isDebugEnabled()) { logger.debug("after delete: " + pp.tableNames.toArray()); } } else if (item instanceof Relationship) { Relationship r = (Relationship) item; logger.debug("trying to delete relationship " + r); r.setSelected(false); SQLRelationship sr = r.getModel(); sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog((JComponent) item, "The selected item type is not recognised"); } } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_END,"Ending multi-select")); } else if (evt.getActionCommand().equals(ArchitectSwingConstants.ACTION_COMMAND_SRC_DBTREE)) { logger.debug("delete action came from dbtree"); TreePath [] selections = dbt.getSelectionPaths(); if (selections.length > 1) { int decision = JOptionPane.showConfirmDialog(dbt, "Are you sure you want to delete the " +selections.length+" selected items?", "Multiple Delete", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_START,"Starting multi-select")); // FIXME: parts of the following code look like they were cut'n'pasted from above... PURE EVIL! Iterator it = Arrays.asList(selections).iterator(); while (it.hasNext()) { TreePath tp = (TreePath) it.next(); SQLObject so = (SQLObject) tp.getLastPathComponent(); if (so instanceof SQLTable) { SQLTable st = (SQLTable) so; pp.db.removeChild(st); pp.tableNames.remove(st.getName().toLowerCase()); } else if (so instanceof SQLColumn) { SQLColumn sc = (SQLColumn)so; SQLTable st = sc.getParentTable(); try { st.removeColumn(sc); } catch (LockedColumnException ex) { int decision = JOptionPane.showConfirmDialog(dbt, "Could not delete the column " + sc.getName() + " because it is part of a relationship key. 
Continue" + " deleting of other selected items?", "Column is Locked", JOptionPane.YES_NO_OPTION); if (decision == JOptionPane.NO_OPTION) { return; } } catch (ArchitectException e) { logger.error("Unexpected exception encountered when attempting to delete column '"+ sc+"' of table '"+sc.getParentTable()+"'"); ASUtils.showExceptionDialog(pp, "Encountered a Problem Deleting the column", e); } } else if (so instanceof SQLRelationship) { SQLRelationship sr = (SQLRelationship) so; sr.getPkTable().removeExportedKey(sr); sr.getFkTable().removeImportedKey(sr); } else { JOptionPane.showMessageDialog(dbt, "The selected SQLObject type is not recognised: " + so.getClass().getName()); } } pp.fireUndoCompoundEvent(new UndoCompoundEvent(this,EventTypes.MULTI_SELECT_END,"Ending multi-select")); } else { logger.debug("delete action came from unknown source, so we do nothing."); // unknown action command source, do nothing } }
logger.debug("add column mapping: " + pkColumn.getName() + " to " + fkColumn.getName() );
public void addMapping(SQLColumn pkColumn, SQLColumn fkColumn) throws ArchitectException { ColumnMapping cmap = new ColumnMapping(); cmap.setPkColumn(pkColumn); cmap.setFkColumn(fkColumn); addChild(cmap); }
RenderedImage thumbImage = JAI.create( "affine", scaleParams );
PlanarImage thumbImage = JAI.create( "affine", scaleParams );
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); // Read the image PlanarImage origImage = null; origImage = JAI.create( "fileload", original.getImageFile().getAbsolutePath() ); if ( origImage == null ) { log.warn( "Error reading image " ); txw.abort(); return; } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); ParameterBlockJAI scaleParams = new ParameterBlockJAI( "affine" ); scaleParams.addSource( origImage ); scaleParams.setParameter( "transform", xform ); scaleParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); RenderedImage thumbImage = JAI.create( "affine", scaleParams ); // Save it FileOutputStream out = null; try { out = new FileOutputStream(thumbnailFile.getAbsolutePath()); } catch(IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } JPEGEncodeParam encodeParam = new JPEGEncodeParam(); ImageEncoder encoder = ImageCodec.createImageEncoder("JPEG", out, encodeParam); try { encoder.encode( thumbImage ); out.close(); } catch (IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); log.debug( "Thumbnail loaded" ); txw.commit(); }
origImage.dispose(); thumbImage.dispose();
protected void createThumbnail( Volume volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); // Read the image PlanarImage origImage = null; origImage = JAI.create( "fileload", original.getImageFile().getAbsolutePath() ); if ( origImage == null ) { log.warn( "Error reading image " ); txw.abort(); return; } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); int maxThumbWidth = 100; int maxThumbHeight = 100; AffineTransform xform = photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, prefRotation -original.getRotated(), origWidth, origHeight ); ParameterBlockJAI scaleParams = new ParameterBlockJAI( "affine" ); scaleParams.addSource( origImage ); scaleParams.setParameter( "transform", xform ); scaleParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); RenderedImage thumbImage = JAI.create( "affine", scaleParams ); // Save it FileOutputStream out = null; try { out = new FileOutputStream(thumbnailFile.getAbsolutePath()); } catch(IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } JPEGEncodeParam encodeParam = new JPEGEncodeParam(); ImageEncoder encoder = ImageCodec.createImageEncoder("JPEG", out, encodeParam); try { encoder.encode( thumbImage ); out.close(); } catch (IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); log.debug( "Thumbnail loaded" ); txw.commit(); }
assertEquals( "PhotoInfo & origInstance hashes differ", hash, instanceHash );
assertTrue( "PhotoInfo & origInstance hashes differ", Arrays.equals( hash, instanceHash ) );
public void testOriginalHash() { String fname = "test1.jpg"; File f = new File( testImgDir, fname ); PhotoInfo photo = null; try { photo = PhotoInfo.addToDB( f ); } catch ( PhotoNotFoundException e ) { fail( "Could not find photo: " + e.getMessage() ); } byte hash[] = photo.getOrigInstanceHash(); byte instanceHash[] = null; assertNotNull( "No hash for original photo", hash ); // If the original instance is deleted the hash value should still remain while ( photo.getNumInstances() > 0 ) { ImageInstance i = photo.getInstance( 0 ); photo.removeInstance( 0 ); if ( i.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { instanceHash = i.getHash(); } i.delete(); } assertEquals( "PhotoInfo & origInstance hashes differ", hash, instanceHash ); byte hash2[] = photo.getOrigInstanceHash(); assertEquals( "Hash after deleting instances is changed", hash, hash2 ); photo.delete(); }
assertEquals( "Hash after deleting instances is changed", hash, hash2 );
assertTrue( "Hash after deleting instances is changed", Arrays.equals( hash, hash2 ) );
public void testOriginalHash() { String fname = "test1.jpg"; File f = new File( testImgDir, fname ); PhotoInfo photo = null; try { photo = PhotoInfo.addToDB( f ); } catch ( PhotoNotFoundException e ) { fail( "Could not find photo: " + e.getMessage() ); } byte hash[] = photo.getOrigInstanceHash(); byte instanceHash[] = null; assertNotNull( "No hash for original photo", hash ); // If the original instance is deleted the hash value should still remain while ( photo.getNumInstances() > 0 ) { ImageInstance i = photo.getInstance( 0 ); photo.removeInstance( 0 ); if ( i.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { instanceHash = i.getHash(); } i.delete(); } assertEquals( "PhotoInfo & origInstance hashes differ", hash, instanceHash ); byte hash2[] = photo.getOrigInstanceHash(); assertEquals( "Hash after deleting instances is changed", hash, hash2 ); photo.delete(); }
adminProps.setProperty(AdminClient.USERNAME, config.getUsername()); adminProps.setProperty(AdminClient.PASSWORD, config.getPassword());
if(config.getUsername() != null && config.getUsername().trim().length() > 0){ adminProps.setProperty(AdminClient.USERNAME, config.getUsername()); adminProps.setProperty(AdminClient.PASSWORD, config.getPassword()); }
public ServerConnection getServerConnection(ApplicationConfig config) throws ConnectionFailedException { /* Initialize the AdminClient */ Properties adminProps = new Properties(); adminProps.setProperty(AdminClient.CONNECTOR_TYPE, AdminClient.CONNECTOR_TYPE_SOAP); adminProps.setProperty(AdminClient.CONNECTOR_HOST, config.getHost()); adminProps.setProperty(AdminClient.CONNECTOR_PORT, config.getPort().toString()); adminProps.setProperty(AdminClient.USERNAME, config.getUsername()); adminProps.setProperty(AdminClient.PASSWORD, config.getPassword()); try{ AdminClient adminClient = AdminClientFactory.createAdminClient(adminProps); return new WebSphereServerConnection(adminClient); }catch(Throwable e){ throw new ConnectionFailedException(e); } }
if (e.getSource() instanceof SQLColumn) { ColumnMapping m = getMappingByPkCol((SQLColumn) e.getSource()); String prop = e.getPropertyName(); if (prop == null || prop.equals("parent") || prop.equals("primaryKeySeq") || prop.equals("remarks")) { } else if (prop.equals("sourceColumn")) { m.getFkColumn().setSourceColumn(m.getPkColumn().getSourceColumn()); } else if (prop.equals("columnName")) { m.getFkColumn().setColumnName(m.getPkColumn().getColumnName()); } else if (prop.equals("type")) { m.getFkColumn().setType(m.getPkColumn().getType()); } else if (prop.equals("sourceDBTypeName")) { m.getFkColumn().setSourceDBTypeName(m.getPkColumn().getSourceDBTypeName()); } else if (prop.equals("scale")) { m.getFkColumn().setScale(m.getPkColumn().getScale()); } else if (prop.equals("precision")) { m.getFkColumn().setPrecision(m.getPkColumn().getPrecision()); } else if (prop.equals("nullable")) { m.getFkColumn().setNullable(m.getPkColumn().getNullable()); } else if (prop.equals("defaultValue")) { m.getFkColumn().setDefaultValue(m.getPkColumn().getDefaultValue()); } else if (prop.equals("autoIncrement")) { m.getFkColumn().setAutoIncrement(m.getPkColumn().isAutoIncrement()); } else { logger.warn("Warning: unknown column property "+prop +" changed while monitoring pkTable"); } }
public void dbObjectChanged(SQLObjectEvent e) { }
try { Iterator it = pkTable.getColumns().iterator(); while (it.hasNext()) { SQLColumn col = (SQLColumn) it.next(); if (col.getPrimaryKeySeq() != null) { ensureInMapping(col); } else { ensureNotInMapping(col); } } } catch (ArchitectException ex) { logger.warn("Coulnd't re-scan table as a result of dbStructureChanged", ex); }
public void dbStructureChanged(SQLObjectEvent e) { }
fkcol = SQLColumn.getDerivedInstance(pkcol, fkTable);
fkcol = (SQLColumn) pkcol.clone();
protected void ensureInMapping(SQLColumn pkcol) throws ArchitectException { if (!containsPkColumn(pkcol)) { SQLColumn fkcol = fkTable.getColumnByName(pkcol.getName()); if (fkcol == null) { fkcol = SQLColumn.getDerivedInstance(pkcol, fkTable); fkTable.addColumn(fkcol); if (identifying) { fkcol.setPrimaryKeySeq(new Integer(fkTable.pkSize())); } } addMapping(pkcol, fkcol); } }
public void setIdentifying(boolean argIdentifying) { this.identifying = argIdentifying; fireDbObjectChanged("identifying");
public void setIdentifying(boolean argIdentifying) throws ArchitectException { if (identifying != argIdentifying) { identifying = argIdentifying; fireDbObjectChanged("identifying"); if (identifying) { Iterator mappings = getChildren().iterator(); while (mappings.hasNext()) { ColumnMapping m = (ColumnMapping) mappings.next(); if (m.getFkColumn().getPrimaryKeySeq() == null) { m.getFkColumn().setPrimaryKeySeq(new Integer(fkTable.pkSize())); } } } else { Iterator mappings = getChildren().iterator(); while (mappings.hasNext()) { ColumnMapping m = (ColumnMapping) mappings.next(); if (m.getFkColumn().getPrimaryKeySeq() != null) { m.getFkColumn().setPrimaryKeySeq(null); } } } }
public void setIdentifying(boolean argIdentifying) { this.identifying = argIdentifying; fireDbObjectChanged("identifying"); }
int[] count = new int[5];
int[] count = new int[6];
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equalsIgnoreCase("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.addHaploidHet(currentInd.getFamilyID() + "\t" + currentInd.getIndividualID() + "\t" + loc); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ if (dadAllele1 != dadAllele2){ dadAllele1 = 0; dadAllele2 = 0; } if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ if(currentInd.getGender() == 1) { //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum ++; MendelError mend = new 
MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... 
count[allele1]++; } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
if(allele1 == allele2) {
if(allele1 == allele2 && allele1 != 9 && allele2 != 9) {
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equalsIgnoreCase("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.addHaploidHet(currentInd.getFamilyID() + "\t" + currentInd.getIndividualID() + "\t" + loc); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ if (dadAllele1 != dadAllele2){ dadAllele1 = 0; dadAllele2 = 0; } if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ if(currentInd.getGender() == 1) { //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum ++; MendelError mend = new 
MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... 
count[allele1]++; } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
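The change above keeps genotypes carrying the haps-file 'h' placeholder (allele code 9) out of the homozygote tally. The following sketch is illustrative only, not Haploview code, and shows the counting rule the new condition enforces.

// Illustrative sketch only (not Haploview code): how the het/hom tally treats the
// 'h' placeholder allele (coded 9) after the change above. Names are hypothetical.
final class GenotypeTally {
    static final int H_CODE = 9; // the 'h' allele used by haps files

    enum Kind { MISSING, HET, HOM }

    static Kind classify(int a1, int a2) {
        if (a1 <= 0 || a2 <= 0) return Kind.MISSING;              // 0 means missing data
        if (a1 != a2 || a1 == H_CODE || a2 == H_CODE) return Kind.HET;
        return Kind.HOM;                                          // equal, concrete alleles only
    }

    public static void main(String[] args) {
        System.out.println(classify(9, 9)); // HET after the fix, not HOM
        System.out.println(classify(2, 2)); // HOM
    }
}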
int numHets = count[5];
count[5] = 0;
if (numHets > 0){
    int numAlleles = 0;
    for (int i = 1; i < count.length-1; i++){
        if (count[i] > 0){
            numAlleles++;
        }
    }
    if (numAlleles == 0){
        count[1] += numHets/2;
        count[3] += numHets/2;
    }else if (numAlleles == 1){
        for (int i = 1; i < count.length-1; i++){
            if (count[i] > 0){
                count[i] += numHets/2;
                if (i == 4){
                    count[3] += numHets/2;
                }else{
                    count[i+1] += numHets/2;
                }
                break;
            }
        }
    }else if (numAlleles == 2){
        for (int i = 1; i < count.length -1; i++){
            if (count[i] > 0){
                count[i] += numHets/2;
            }
        }
    }
}
private MarkerResult checkMarker(int loc)throws PedFileException{ MarkerResult result = new MarkerResult(); Individual currentInd; int missing=0, founderHetCount=0, mendErrNum=0; int allele1=0, allele2=0, hom=0, het=0, haploid = 0; Hashtable founderGenoCount = new Hashtable(); Hashtable kidgeno = new Hashtable(); int[] founderHomCount = new int[5]; Vector mendels = new Vector(); int[] count = new int[5]; for(int i=0;i<5;i++) { founderHomCount[i] =0; count[i]=0; } //loop through each family, check data for marker loc Enumeration famList = pedFile.getFamList(); while(famList.hasMoreElements()){ Family currentFamily = pedFile.getFamily((String)famList.nextElement()); Enumeration indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); //if haploid, check for male hets if(Chromosome.getDataChrom().equalsIgnoreCase("chrx") && currentInd.getGender()==1){ if(allele1 != allele2) { currentInd.zeroOutMarker(loc); pedFile.addHaploidHet(currentInd.getFamilyID() + "\t" + currentInd.getIndividualID() + "\t" + loc); } } //no allele data missing if(allele1 > 0 && allele2 >0){ //make sure entry has parents if (currentFamily.containsMember(currentInd.getMomID()) && currentFamily.containsMember(currentInd.getDadID())){ //do mendel check int momAllele1 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,0); int momAllele2 = (currentFamily.getMember(currentInd.getMomID())).getAllele(loc,1); int dadAllele1 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,0); int dadAllele2 = (currentFamily.getMember(currentInd.getDadID())).getAllele(loc,1); if(Chromosome.getDataChrom().equalsIgnoreCase("chrx")){ if (dadAllele1 != dadAllele2){ dadAllele1 = 0; dadAllele2 = 0; } if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ if(currentInd.getGender() == 1) { //this is an x chrom for a male, so the only thing we need to check is if //allele1 matches either momallele1 or momallele2 if(allele1 != momAllele1 && allele1 != momAllele2) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //if gender is anything except 1 we assume female if(momAllele1 == momAllele2) { //mom hom and dad matches mom if(dadAllele1 == momAllele1) { //kid must be hom same allele if(allele1 != momAllele1 || allele2 != momAllele2){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } }else { //kid must be het if(allele1 == allele2 ){ mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } }else{ //mom het,so only need to check that at least one allele matches dad if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum ++; MendelError mend = new 
MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } } } }else{ //don't check if parents are missing any data if (!(momAllele1 == 0 || momAllele2 == 0 || dadAllele1 == 0 || dadAllele2 ==0)){ //mom hom if(momAllele1 == momAllele2){ //both parents hom if (dadAllele1 == dadAllele2){ //both parents hom same allele if (momAllele1 == dadAllele1){ //kid must be hom same allele if (allele1 != momAllele1 || allele2 != momAllele1) { mendErrNum ++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } //parents hom diff allele }else{ //kid must be het if (allele1 == allele2) { mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom hom dad het }else{ //kid can't be hom for non-momallele if (allele1 != momAllele1 && allele2 != momAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //mom het }else{ //dad hom if (dadAllele1 == dadAllele2){ //kid can't be hom for non-dadallele if(allele1 != dadAllele1 && allele2 != dadAllele1){ mendErrNum++; MendelError mend = new MendelError(currentInd.getFamilyID(), currentInd.getIndividualID()); mendels.add(mend); currentInd.zeroOutMarker(loc); currentFamily.getMember(currentInd.getMomID()).zeroOutMarker(loc); currentFamily.getMember(currentInd.getDadID()).zeroOutMarker(loc); } } //both parents het no mend err poss } } } } //end mendel check } } indList = currentFamily.getMemberList(); //loop through each individual in the current Family while(indList.hasMoreElements()){ currentInd = currentFamily.getMember((String)indList.nextElement()); if (currentInd.getZeroed(loc)){ allele1 = 0; allele2 = 0; }else{ allele1 = currentInd.getAllele(loc,0); allele2 = currentInd.getAllele(loc,1); } String familyID = currentInd.getFamilyID(); //no allele data missing if(allele1 > 0 && allele2 >0){ //indiv has no parents -- i.e. is a founder if(!currentFamily.hasAncestor(currentInd.getIndividualID())){ //set founderGenoCount if(founderGenoCount.containsKey(familyID)){ int value = ((Integer)founderGenoCount.get(familyID)).intValue() +1; founderGenoCount.put(familyID, new Integer(value)); }else{ founderGenoCount.put(familyID, new Integer(1)); } if (allele1 != 9){ //value of 9 means an 'h' allele for haps files... 
count[allele1]++; } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 != allele2 || allele1 == 9 || allele2 == 9) { founderHetCount++; }else{ founderHomCount[allele1]++; } if(allele2 != 9){ count[allele2]++; } } }else{ if(kidgeno.containsKey(familyID)){ int value = ((Integer)kidgeno.get(familyID)).intValue() +1; kidgeno.put(familyID, new Integer(value)); } else{ kidgeno.put(familyID, new Integer(1)); } } if (!Chromosome.getDataChrom().equalsIgnoreCase("chrx") || currentInd.getGender() != 1) { if(allele1 == allele2) { hom++; }else { het++; } }else{ haploid++; } } //missing data else missing++; } currentFamily.setMendErrs(mendErrNum); } double obsHET = getObsHET(het, hom); double freqStuff[] = null; try{ freqStuff = getFreqStuff(count); }catch (PedFileException pfe){ throw new PedFileException("More than two alleles at marker " + (loc+1)); } double preHET = freqStuff[0]; double maf = freqStuff[1]; String minorAllele, majorAllele; if (freqStuff[2] == 1){ minorAllele = "A"; }else if (freqStuff[2] == 2){ minorAllele = "C"; }else if (freqStuff[2] == 3){ minorAllele = "G"; }else{ minorAllele = "T"; } if (freqStuff[3] == 1){ majorAllele = "A"; }else if (freqStuff[3] == 2){ majorAllele = "C"; }else if (freqStuff[3] == 3){ majorAllele = "G"; }else{ majorAllele = "T"; } //HW p value double pvalue = getPValue(founderHomCount, founderHetCount); //geno percent double genopct = getGenoPercent(het, hom, haploid, missing); // num of families with a fully genotyped trio //int famTrio =0; int famTrio = getNumOfFamTrio(pedFile.getFamList(), founderGenoCount, kidgeno); //rating int rating = this.getRating(genopct, pvalue, mendErrNum,maf); if (mendErrNum > 0 && !pedFile.getMendelsExist()){ pedFile.setMendelsExist(true); } result.setObsHet(obsHET); result.setPredHet(preHET); result.setMAF(maf); result.setMinorAllele(minorAllele); result.setMajorAllele(majorAllele); result.setHWpvalue(pvalue); result.setGenoPercent(genopct); result.setFamTrioNum(famTrio); result.setMendErrNum(mendErrNum); result.setRating(rating); result.setMendelErrors(mendels); return result; }
else {
else if ( uri != null ) { schema = factory.compileSchema(in, uri); } else{
public void doTag(final XMLOutput output) throws Exception {
    if ( var == null ) {
        throw new MissingAttributeException("var");
    }
    if ( factory == null ) {
        factory = new com.sun.msv.verifier.jarv.TheFactoryImpl();
    }
    InputStream in = null;
    if ( uri != null ) {
        in = context.getResourceAsStream( uri );
        if ( in == null ) {
            throw new JellyException( "Could not find resource for uri: " + uri );
        }
    }
    else {
        String text = getBodyText();
        byte[] data = text.getBytes();
        in = new ByteArrayInputStream( text.getBytes() );
    }
    Schema schema = null;
    if (systemId != null) {
        schema = factory.compileSchema(in, systemId);
    }
    else {
        schema = factory.compileSchema(in);
    }
    if ( schema == null ) {
        throw new JellyException( "Could not create a valid schema" );
    }
    Verifier verifier = schema.newVerifier();
    context.setVariable(var, verifier);
}
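The hunk above makes the tag hand the resource uri to compileSchema when one was given, so the parser has a base URI for resolving anything relative inside the schema. Below is a hedged standalone sketch of the same JARV call; the schema path is invented for illustration, and only the compileSchema overloads already used by the tag are assumed.

// Hedged sketch (not part of the tag library): compiling a schema from a stream while
// also supplying a base URI, which is what the two-argument overload enables.
// The schema file path here is hypothetical.
import java.io.FileInputStream;
import java.io.InputStream;
import org.iso_relax.verifier.Schema;
import org.iso_relax.verifier.Verifier;
import org.iso_relax.verifier.VerifierFactory;

public class SchemaCompileExample {
    public static void main(String[] args) throws Exception {
        VerifierFactory factory = new com.sun.msv.verifier.jarv.TheFactoryImpl();
        String uri = new java.io.File("schemas/sample.rng").toURI().toString(); // hypothetical schema file
        try (InputStream in = new FileInputStream("schemas/sample.rng")) {
            Schema schema = factory.compileSchema(in, uri); // base URI supplied alongside the stream
            Verifier verifier = schema.newVerifier();
            System.out.println("verifier ready: " + (verifier != null));
        }
    }
}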
fc = new JFileChooser(System.getProperty("user.dir"));
try{
    fc = new JFileChooser(System.getProperty("user.dir"));
}catch(NullPointerException n){
    try{
        UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
        fc = new JFileChooser(System.getProperty("user.dir"));
        UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
    }catch(Exception e){
    }
}
public HaploView(){ fc = new JFileChooser(System.getProperty("user.dir")); //menu setup JMenuBar menuBar = new JMenuBar(); setJMenuBar(menuBar); JMenuItem menuItem; //file menu JMenu fileMenu = new JMenu("File"); menuBar.add(fileMenu); menuItem = new JMenuItem(READ_GENOTYPES); setAccelerator(menuItem, 'O', false); menuItem.addActionListener(this); fileMenu.add(menuItem); /* viewGenotypesItem = new JMenuItem(VIEW_GENOTYPES); viewGenotypesItem.addActionListener(this); //viewGenotypesItem.setEnabled(false); fileMenu.add(viewGenotypesItem); */ readMarkerItem = new JMenuItem(READ_MARKERS); setAccelerator(readMarkerItem, 'I', false); readMarkerItem.addActionListener(this); readMarkerItem.setEnabled(false); fileMenu.add(readMarkerItem); /* viewMarkerItem = new JMenuItem(VIEW_MARKERS); viewMarkerItem.addActionListener(this); //viewMarkerItem.setEnabled(false); fileMenu.add(viewMarkerItem); */ fileMenu.addSeparator(); exportMenuItems = new JMenuItem[exportItems.length]; for (int i = 0; i < exportItems.length; i++) { exportMenuItems[i] = new JMenuItem(exportItems[i]); exportMenuItems[i].addActionListener(this); exportMenuItems[i].setEnabled(false); fileMenu.add(exportMenuItems[i]); } fileMenu.addSeparator(); fileMenu.setMnemonic(KeyEvent.VK_F); menuItem = new JMenuItem(QUIT); setAccelerator(menuItem, 'Q', false); menuItem.addActionListener(this); fileMenu.add(menuItem); /// display menu JMenu displayMenu = new JMenu("Display"); displayMenu.setMnemonic(KeyEvent.VK_D); menuBar.add(displayMenu); ButtonGroup group = new ButtonGroup(); viewMenuItems = new JRadioButtonMenuItem[viewItems.length]; for (int i = 0; i < viewItems.length; i++) { viewMenuItems[i] = new JRadioButtonMenuItem(viewItems[i], i == 0); viewMenuItems[i].addActionListener(this); KeyStroke ks = KeyStroke.getKeyStroke('1' + i, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()); viewMenuItems[i].setAccelerator(ks); displayMenu.add(viewMenuItems[i]); viewMenuItems[i].setEnabled(false); group.add(viewMenuItems[i]); } displayMenu.addSeparator(); //a submenu ButtonGroup zg = new ButtonGroup(); JMenu zoomMenu = new JMenu("D prime zoom"); zoomMenu.setMnemonic(KeyEvent.VK_Z); zoomMenuItems = new JRadioButtonMenuItem[zoomItems.length]; for (int i = 0; i < zoomItems.length; i++){ zoomMenuItems[i] = new JRadioButtonMenuItem(zoomItems[i], i==0); zoomMenuItems[i].addActionListener(this); zoomMenuItems[i].setActionCommand("zoom" + i); zoomMenu.add(zoomMenuItems[i]); zg.add(zoomMenuItems[i]); } displayMenu.add(zoomMenu); //another submenu ButtonGroup cg = new ButtonGroup(); JMenu colorMenu = new JMenu("D prime color scheme"); colorMenu.setMnemonic(KeyEvent.VK_C); colorMenuItems = new JRadioButtonMenuItem[colorItems.length]; for (int i = 0; i< colorItems.length; i++){ colorMenuItems[i] = new JRadioButtonMenuItem(colorItems[i],i==0); colorMenuItems[i].addActionListener(this); colorMenuItems[i].setActionCommand("color" + i); colorMenu.add(colorMenuItems[i]); cg.add(colorMenuItems[i]); if (i != 0){ colorMenuItems[i].setEnabled(false); } } displayMenu.add(colorMenu); //analysis menu JMenu analysisMenu = new JMenu("Analysis"); analysisMenu.setMnemonic(KeyEvent.VK_A); menuBar.add(analysisMenu); //a submenu ButtonGroup bg = new ButtonGroup(); JMenu blockMenu = new JMenu("Define Blocks"); blockMenu.setMnemonic(KeyEvent.VK_B); blockMenuItems = new JRadioButtonMenuItem[blockItems.length]; for (int i = 0; i < blockItems.length; i++){ blockMenuItems[i] = new JRadioButtonMenuItem(blockItems[i], i==0); blockMenuItems[i].addActionListener(this); 
blockMenuItems[i].setActionCommand("block" + i); blockMenu.add(blockMenuItems[i]); bg.add(blockMenuItems[i]); if (i != 0){ blockMenuItems[i].setEnabled(false); } } analysisMenu.add(blockMenu); clearBlocksItem = new JMenuItem(CLEAR_BLOCKS); setAccelerator(clearBlocksItem, 'C', false); clearBlocksItem.addActionListener(this); clearBlocksItem.setEnabled(false); analysisMenu.add(clearBlocksItem); JMenuItem customizeBlocksItem = new JMenuItem(CUST_BLOCKS); customizeBlocksItem.addActionListener(this); analysisMenu.add(customizeBlocksItem); //color key keyMenu = new JMenu("Key"); changeKey(1); menuBar.add(Box.createHorizontalGlue()); menuBar.add(keyMenu); /** NEEDS FIXING helpMenu = new JMenu("Help"); menuBar.add(Box.createHorizontalGlue()); menuBar.add(helpMenu); menuItem = new JMenuItem("Tutorial"); menuItem.addActionListener(this); helpMenu.add(menuItem); **/ /* clearBlocksMenuItem.addActionListener(this); clearBlocksMenuItem.setEnabled(false); toolMenu.add(clearBlocksMenuItem); */ addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e){ quit(); } }); addComponentListener(new ResizeListener()); }
JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));
public void actionPerformed(ActionEvent e) { String command = e.getActionCommand(); if (command == READ_GENOTYPES){ ReadDataDialog readDialog = new ReadDataDialog("Open new data", this); readDialog.pack(); readDialog.setVisible(true); } else if (command == READ_MARKERS){ //JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); fc.setSelectedFile(null); int returnVal = fc.showOpenDialog(this); if (returnVal == JFileChooser.APPROVE_OPTION) { readMarkers(fc.getSelectedFile()); } }else if (command == CUST_BLOCKS){ TweakBlockDefsDialog tweakDialog = new TweakBlockDefsDialog("Customize Blocks", this); tweakDialog.pack(); tweakDialog.setVisible(true); }else if (command == CLEAR_BLOCKS){ colorMenuItems[0].setSelected(true); for (int i = 1; i< colorMenuItems.length; i++){ colorMenuItems[i].setEnabled(false); } changeBlocks(3,1); //blockdef clauses }else if (command.startsWith("block")){ int method = Integer.valueOf(command.substring(5)).intValue(); changeBlocks(method,1); for (int i = 1; i < colorMenuItems.length; i++){ if (method+1 == i){ colorMenuItems[i].setEnabled(true); }else{ colorMenuItems[i].setEnabled(false); } } colorMenuItems[0].setSelected(true); //zooming clauses }else if (command.startsWith("zoom")){ dPrimeDisplay.zoom(Integer.valueOf(command.substring(4)).intValue()); //coloring clauses }else if (command.startsWith("color")){ dPrimeDisplay.refresh(Integer.valueOf(command.substring(5)).intValue()+1); changeKey(Integer.valueOf(command.substring(5)).intValue()+1); //exporting clauses }else if (command == EXPORT_PNG){ JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), PNG_MODE, fc.getSelectedFile()); } }else if (command == EXPORT_TEXT){ JFileChooser fc = new JFileChooser(System.getProperty("user.dir")); if (fc.showSaveDialog(this) == JFileChooser.APPROVE_OPTION){ export(tabs.getSelectedIndex(), TXT_MODE, fc.getSelectedFile()); } }else if (command == "Select All"){ checkPanel.selectAll(); }else if (command == "Rescore Markers"){ String cut = hwcut.getText(); if (cut.equals("")){ cut = "0"; } CheckData.hwCut = Double.parseDouble(cut); cut = genocut.getText(); if (cut.equals("")){ cut="0"; } CheckData.failedGenoCut = Integer.parseInt(cut); cut = mendcut.getText(); if (cut.equals("")){ cut="0"; } CheckData.numMendErrCut = Integer.parseInt(cut); checkPanel.redoRatings(); }else if (command == "Tutorial"){ showHelp(); } else if (command == QUIT){ quit(); } else { for (int i = 0; i < viewItems.length; i++) { if (command == viewItems[i]) tabs.setSelectedIndex(i); } } }
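This hunk is part of replacing the per-action JFileChooser with the single fc field created in the constructor shown earlier, so the chooser remembers its last directory from one dialog to the next. A minimal sketch of that pattern, with illustrative names only:

// Hedged sketch (names are made up): one shared chooser for the life of the window,
// so its current directory persists between Open/Save dialogs.
import javax.swing.JFileChooser;
import javax.swing.JFrame;

public class SharedChooserDemo extends JFrame {
    // created once; subsequent dialogs reopen in the last-visited directory
    private final JFileChooser fc = new JFileChooser(System.getProperty("user.dir"));

    void openSomething() {
        fc.setSelectedFile(null);   // clear any selection left over from the previous dialog
        if (fc.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
            System.out.println("chose " + fc.getSelectedFile());
        }
    }

    public static void main(String[] args) {
        SharedChooserDemo demo = new SharedChooserDemo();
        demo.openSomething();       // the same fc instance would be reused on later calls
    }
}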
private static ResourceBundle findMatch(String basename, Locale pref) {
private static ResourceBundle findMatch(String basename, Locale pref, ClassLoader cl) {
private static ResourceBundle findMatch(String basename, Locale pref) {
    ResourceBundle match = null;
    try {
        ResourceBundle bundle =
            ResourceBundle.getBundle(basename, pref, Thread.currentThread().getContextClassLoader());
        Locale avail = bundle.getLocale();
        if (pref.equals(avail)) {
            // Exact match
            match = bundle;
        } else {
            if (pref.getLanguage().equals(avail.getLanguage())
                    && ("".equals(avail.getCountry()))) {
                // Language match.
                // By making sure the available locale does not have a
                // country and matches the preferred locale's language, we
                // rule out "matches" based on the container's default
                // locale. For example, if the preferred locale is
                // "en-US", the container's default locale is "en-UK", and
                // there is a resource bundle (with the requested base
                // name) available for "en-UK", ResourceBundle.getBundle()
                // will return it, but even though its language matches
                // that of the preferred locale, we must ignore it,
                // because matches based on the container's default locale
                // are not portable across different containers with
                // different default locales.
                match = bundle;
            }
        }
    } catch (MissingResourceException mre) {
    }
    return match;
}
ResourceBundle.getBundle(basename, pref, Thread.currentThread().getContextClassLoader());
ResourceBundle.getBundle(basename, pref, cl);
private static ResourceBundle findMatch(String basename, Locale pref) {
    ResourceBundle match = null;
    try {
        ResourceBundle bundle =
            ResourceBundle.getBundle(basename, pref, Thread.currentThread().getContextClassLoader());
        Locale avail = bundle.getLocale();
        if (pref.equals(avail)) {
            // Exact match
            match = bundle;
        } else {
            if (pref.getLanguage().equals(avail.getLanguage())
                    && ("".equals(avail.getCountry()))) {
                // Language match.
                // By making sure the available locale does not have a
                // country and matches the preferred locale's language, we
                // rule out "matches" based on the container's default
                // locale. For example, if the preferred locale is
                // "en-US", the container's default locale is "en-UK", and
                // there is a resource bundle (with the requested base
                // name) available for "en-UK", ResourceBundle.getBundle()
                // will return it, but even though its language matches
                // that of the preferred locale, we must ignore it,
                // because matches based on the container's default locale
                // are not portable across different containers with
                // different default locales.
                match = bundle;
            }
        }
    } catch (MissingResourceException mre) {
    }
    return match;
}
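The two hunks above thread an explicit ClassLoader into findMatch instead of always using the thread context class loader. The sketch below, with a hypothetical bundle name, shows a caller resolving a bundle against a specific loader, which is what the extra parameter allows.

// Hedged illustration (names hypothetical): looking up a bundle with an explicit
// class loader, useful when the bundle ships with a particular web-app or plugin
// loader rather than the caller's context loader.
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

public class BundleLookup {
    static ResourceBundle lookup(String basename, Locale pref, ClassLoader cl) {
        try {
            return ResourceBundle.getBundle(basename, pref, cl);
        } catch (MissingResourceException mre) {
            return null; // caller decides on a fallback
        }
    }

    public static void main(String[] args) {
        ClassLoader cl = BundleLookup.class.getClassLoader();
        ResourceBundle rb = lookup("messages", Locale.US, cl); // "messages" is a made-up basename
        System.out.println(rb == null ? "no bundle found" : rb.getLocale().toString());
    }
}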
if(description == null)
if(description == null || description.trim().length() == 0)
public String getDescription() {
    if(description == null)
        return "No description available.";
    return description;
}
HaploviewTab currentTab = ldTab;
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //inputOptions[2] = custom association test list file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); final AssociationTestSet customAssocSet; try { if (inputOptions[2] != null && inputOptions[1] == null){ throw new HaploViewException("A marker information file is required if a tests file is specified."); } this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS_FILE){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_ASSOC_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); if (type == HAPS_FILE){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } if(type != HAPS_FILE && theData.getPedFile().isBogusParents()) { JOptionPane.showMessageDialog(this, "One or more individuals in the file reference non-existent parents.\nThese references have been ignored.", "File Error", JOptionPane.ERROR_MESSAGE); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } //turn on/off gbrowse menu if (Options.isGBrowseShown()){ gbEditItem.setEnabled(true); }else{ gbEditItem.setEnabled(false); } checkPanel = null; if (type == HAPS_FILE){ readMarkers(markerFile, null); //initialize realIndex Chromosome.doFilter(Chromosome.getUnfilteredSize()); customAssocSet = null; }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); //we read the file in first, so we can whitelist all the markers in the custom test set HashSet whiteListedCustomMarkers = new HashSet(); if (inputOptions[2] != null){ customAssocSet = new AssociationTestSet(inputOptions[2]); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } theData.setWhiteList(whiteListedCustomMarkers); checkPanel = new CheckDataPanel(this); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); //set up the indexing to take into account skipped markers. 
Chromosome.doFilter(checkPanel.getMarkerResults()); } //let's start the math final SwingWorker worker = new SwingWorker(){ public Object construct(){ for (int i = 0; i < viewMenuItems.length; i++){ viewMenuItems[i].setEnabled(false); } dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); HaploviewTab currentTab = ldTab; tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); ldTab = new HaploviewTab(dPrimeScroller); tabs.addTab(VIEW_DPRIME, ldTab); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); hapsTab = new HaploviewTab(hapScroller); hapsTab.add(hdc); tabs.addTab(VIEW_HAPLOTYPES, hapsTab); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //check data panel if (checkPanel != null){ checkTab = new HaploviewTab(checkPanel); cdc = new CheckDataController(window); checkTab.add(cdc); tabs.addTab(VIEW_CHECK_PANEL, checkTab); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=checkTab; } //only show tagger if we have a .info file if (theData.infoKnown){ //tagger display taggerConfigPanel = new TaggerConfigPanel(theData); JPanel metaTagPanel = new JPanel(); metaTagPanel.setLayout(new BoxLayout(metaTagPanel,BoxLayout.Y_AXIS)); metaTagPanel.add(taggerConfigPanel); JTabbedPane tagTabs = new JTabbedPane(); tagTabs.add("Configuration",metaTagPanel); JPanel resMetaPanel = new JPanel(); resMetaPanel.setLayout(new BoxLayout(resMetaPanel,BoxLayout.Y_AXIS)); TaggerResultsPanel tagResultsPanel = new TaggerResultsPanel(); taggerConfigPanel.addActionListener(tagResultsPanel); resMetaPanel.add(tagResultsPanel); tagTabs.addTab("Results",resMetaPanel); taggerTab = new HaploviewTab(tagTabs); tabs.addTab(VIEW_TAGGER,taggerTab); viewMenuItems[VIEW_TAGGER_NUM].setEnabled(true); } //Association panel if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(new AssociationTestSet(theData.getPedFile(), null, Chromosome.getAllMarkers())); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); hapAssocPanel = new HaploAssocPanel(new AssociationTestSet(theData.getHaplotypes(), null)); metaAssoc.add("Haplotypes", hapAssocPanel); //custom association tests custAssocPanel = null; if(customAssocSet != null) { try { customAssocSet.runFileTests(theData, tdtPanel.getTestSet().getMarkerAssociationResults()); custAssocPanel = new 
CustomAssocPanel(customAssocSet); metaAssoc.addTab("Custom",custAssocPanel); metaAssoc.setSelectedComponent(custAssocPanel); } catch (HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } AssociationTestSet permSet; boolean cust = false; if (custAssocPanel != null){ permSet = custAssocPanel.getTestSet(); cust = true; }else{ permSet = new AssociationTestSet(); permSet.cat(tdtPanel.getTestSet()); permSet.cat(hapAssocPanel.getTestSet()); } permutationPanel = new PermutationTestPanel(new PermutationTestSet(0,theData.getSavedEMs(), theData.getPedFile(),permSet), cust); metaAssoc.add(permutationPanel,"Permutation Tests"); associationTab = new HaploviewTab(metaAssoc); tabs.addTab(VIEW_ASSOC, associationTab); viewMenuItems[VIEW_ASSOC_NUM].setEnabled(true); } tabs.setSelectedComponent(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } }
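The currentTab line in this hunk is what lets the tab pane default to the LD view yet open on the data-check tab when one was built. A compact illustration of the same JTabbedPane pattern follows; all names are made up for the example.

// Hedged sketch (not Haploview code): remember which tab should be selected once the
// panes are assembled, defaulting to the first view but preferring the check tab
// whenever it exists.
import javax.swing.JPanel;
import javax.swing.JTabbedPane;

public class DefaultTabDemo {
    public static void main(String[] args) {
        JTabbedPane tabs = new JTabbedPane();
        JPanel ldTab = new JPanel();
        JPanel checkTab = new JPanel();      // imagine this is only built for ped-file input

        JPanel currentTab = ldTab;           // default: the LD view
        tabs.addTab("LD Plot", ldTab);
        tabs.addTab("Check Markers", checkTab);
        currentTab = checkTab;               // prefer the data-check view when it was built

        tabs.setSelectedComponent(currentTab);
        System.out.println("selected tab: " + tabs.getSelectedIndex()); // prints 1
    }
}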
HaploviewTab currentTab = ldTab;
public Object construct(){ for (int i = 0; i < viewMenuItems.length; i++){ viewMenuItems[i].setEnabled(false); } dPrimeDisplay=null; changeKey(); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); HaploviewTab currentTab = ldTab; tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); ldTab = new HaploviewTab(dPrimeScroller); tabs.addTab(VIEW_DPRIME, ldTab); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); hapsTab = new HaploviewTab(hapScroller); hapsTab.add(hdc); tabs.addTab(VIEW_HAPLOTYPES, hapsTab); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //check data panel if (checkPanel != null){ checkTab = new HaploviewTab(checkPanel); cdc = new CheckDataController(window); checkTab.add(cdc); tabs.addTab(VIEW_CHECK_PANEL, checkTab); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=checkTab; } //only show tagger if we have a .info file if (theData.infoKnown){ //tagger display taggerConfigPanel = new TaggerConfigPanel(theData); JPanel metaTagPanel = new JPanel(); metaTagPanel.setLayout(new BoxLayout(metaTagPanel,BoxLayout.Y_AXIS)); metaTagPanel.add(taggerConfigPanel); JTabbedPane tagTabs = new JTabbedPane(); tagTabs.add("Configuration",metaTagPanel); JPanel resMetaPanel = new JPanel(); resMetaPanel.setLayout(new BoxLayout(resMetaPanel,BoxLayout.Y_AXIS)); TaggerResultsPanel tagResultsPanel = new TaggerResultsPanel(); taggerConfigPanel.addActionListener(tagResultsPanel); resMetaPanel.add(tagResultsPanel); tagTabs.addTab("Results",resMetaPanel); taggerTab = new HaploviewTab(tagTabs); tabs.addTab(VIEW_TAGGER,taggerTab); viewMenuItems[VIEW_TAGGER_NUM].setEnabled(true); } //Association panel if(Options.getAssocTest() != ASSOC_NONE) { JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(new AssociationTestSet(theData.getPedFile(), null, Chromosome.getAllMarkers())); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add("Single Marker", tdtPanel); hapAssocPanel = new HaploAssocPanel(new AssociationTestSet(theData.getHaplotypes(), null)); metaAssoc.add("Haplotypes", hapAssocPanel); //custom association tests custAssocPanel = null; if(customAssocSet != null) { try { customAssocSet.runFileTests(theData, tdtPanel.getTestSet().getMarkerAssociationResults()); custAssocPanel = new CustomAssocPanel(customAssocSet); metaAssoc.addTab("Custom",custAssocPanel); metaAssoc.setSelectedComponent(custAssocPanel); } catch 
(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } } AssociationTestSet permSet; boolean cust = false; if (custAssocPanel != null){ permSet = custAssocPanel.getTestSet(); cust = true; }else{ permSet = new AssociationTestSet(); permSet.cat(tdtPanel.getTestSet()); permSet.cat(hapAssocPanel.getTestSet()); } permutationPanel = new PermutationTestPanel(new PermutationTestSet(0,theData.getSavedEMs(), theData.getPedFile(),permSet), cust); metaAssoc.add(permutationPanel,"Permutation Tests"); associationTab = new HaploviewTab(metaAssoc); tabs.addTab(VIEW_ASSOC, associationTab); viewMenuItems[VIEW_ASSOC_NUM].setEnabled(true); } tabs.setSelectedComponent(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; }
wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width);
wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width)/2;
public void paintComponent(Graphics g){ PairwiseLinkage[][] dPrimeTable = theData.filteredDPrimeTable; Vector blocks = theData.blocks; Rectangle visRect = getVisibleRect(); //deal with zooming if (chartSize.getWidth() > (3*visRect.width)){ showWM = true; }else{ showWM = false; } if (zoomLevel == 0){ printDetails = true; } else{ printDetails = false; } Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); if (size.height < pref.height){ setSize(pref); } //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. if (!forExport){ if (!theData.infoKnown){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); } else { g2.translate((size.width - pref.width) / 2, 0); } } FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; left = H_BORDER; top = V_BORDER; if (forExport){ left -= exportStart * boxSize; } FontMetrics metrics; int ascent; g2.setColor(BG_GREY); g2.fillRect(0,0,pref.width,pref.height); g2.setColor(Color.black); g2.setFont(boldMarkerNameFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); //TODO: finish implementing scaling gizmo /*//deal with adding some space to better display data with large gaps int cumulativeGap[] = new int[Chromosome.getFilteredSize()]; for (int i = 0; i < cumulativeGap.length; i++){ cumulativeGap[i] = 0; } if (theData.infoKnown){ double mean = (((SNP)Chromosome.markers[Chromosome.markers.length-1]).getPosition() - ((SNP)Chromosome.markers[0]).getPosition())/Chromosome.markers.length-1; for (int i = 1; i < cumulativeGap.length; i++){ double sep = Chromosome.getMarker(i).getPosition() - Chromosome.getMarker(i-1).getPosition(); if (sep > mean*10){ cumulativeGap[i] = cumulativeGap[i-1] + (int)(sep/mean)*4; }else{ cumulativeGap[i] = cumulativeGap[i-1]; } } } */ //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = (visRect.x-clickXShift-(visRect.y + visRect.height-clickYShift))/boxSize; if (lowX < 0) { lowX = 0; } highX = ((visRect.x + visRect.width)/boxSize)+1; if (highX > dPrimeTable.length-1){ highX = dPrimeTable.length-1; } lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize; if (lowY < lowX+1){ lowY = lowX+1; } highY = (((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height))/boxSize)+1; if (highY > dPrimeTable.length){ highY = dPrimeTable.length; } if (forExport){ lowX = exportStart; lowY = exportStart; highX = exportStop; highY = exportStop; } if (theData.infoKnown) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations int lineSpan = (dPrimeTable.length-1) * boxSize; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition(); double spanpos = maxpos - minpos; g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fillRect(left+1, top+1, lineSpan-1, TICK_HEIGHT-1); g2.setColor(Color.black); g2.drawRect(left, top, lineSpan, TICK_HEIGHT); for (int i = 0; i < Chromosome.getFilteredSize(); i++){ double pos = (Chromosome.getFilteredMarker(i).getPosition() - minpos) / spanpos; int xx = (int) (left + lineSpan*pos); g2.setStroke(thickerStroke); g2.drawLine(xx, top, xx, top + TICK_HEIGHT); g2.setStroke(thinnerStroke); 
g2.drawLine(xx, top + TICK_HEIGHT, left + i*boxSize, top+TICK_BOTTOM); } top += TICK_BOTTOM + TICK_HEIGHT; //// draw the marker names if (printDetails){ widestMarkerName = metrics.stringWidth(Chromosome.getFilteredMarker(0).getName()); for (int x = 1; x < dPrimeTable.length; x++) { int thiswide = metrics.stringWidth(Chromosome.getFilteredMarker(x).getName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); for (int x = 0; x < dPrimeTable.length; x++) { if (theData.isInBlock[x]){ g2.setFont(boldMarkerNameFont); }else{ g2.setFont(markerNameFont); } g2.drawString(Chromosome.getFilteredMarker(x).getName(),TEXT_GAP, x*boxSize + ascent/3); } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } top += blockDispHeight; //// draw the marker numbers if (printDetails){ g2.setFont(markerNumFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < dPrimeTable.length; x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g2.drawString(mark, left + x*boxSize - metrics.stringWidth(mark)/2, top + ascent); } top += boxRadius/2; // give a little space between numbers and boxes } //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(theData.infoKnown)){ clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable[x][y] == null){ continue; } double d = dPrimeTable[x][y].getDPrime(); //double l = dPrimeTable[x][y].getLOD(); Color boxColor = dPrimeTable[x][y].getColor(); // draw markers above int xx = left + (x + y) * boxSize / 2; int yy = top + (y - x) * boxSize / 2; diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g2.setColor(boxColor); g2.fillPolygon(diamond); if(printDetails){ g2.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val = (int) (d * 100); g2.setColor((val < 50) ? 
Color.gray : Color.black); if (boxColor == Color.darkGray){ g2.setColor(Color.white); } if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } boolean even = true; //highlight blocks g2.setFont(markerNameFont); ascent = g2.getFontMetrics().getAscent(); //g.setColor(new Color(153,255,153)); g2.setColor(Color.black); //g.setColor(new Color(51,153,51)); for (int i = 0; i < blocks.size(); i++){ int[] theBlock = (int[])blocks.elementAt(i); int first = theBlock[0]; int last = theBlock[theBlock.length-1]; //big vee around whole thing g2.setStroke(fatStroke); g2.drawLine(left + (2*first) * boxSize/2 - boxRadius, top, left + (first + last) * boxSize/2, top + (last - first) * boxSize/2 + boxRadius); g2.drawLine(left + (first + last) * boxSize/2, top + (last - first) * boxSize/2 + boxRadius, left + (2*last) * boxSize/2+boxRadius, top); for (int j = first; j <= last; j++){ if (theData.isInBlock[j]){ g2.setStroke(fatStroke); }else{ g2.setStroke(dashedFatStroke); } g2.drawLine(left+j*boxSize-boxSize/2, top-blockDispHeight, left+j*boxSize+boxSize/2, top-blockDispHeight); } //lines to connect to block display g2.setStroke(fatStroke); g2.drawLine(left + first*boxSize-boxSize/2, top-1, left+first*boxSize-boxSize/2, top-blockDispHeight); g2.drawLine(left+last*boxSize+boxSize/2, top-1, left+last*boxSize+boxSize/2, top-blockDispHeight); if (printDetails){ String labelString = new String ("Block " + (i+1)); if (theData.infoKnown){ long blockSize = Chromosome.getMarker(last).getPosition() - Chromosome.getMarker(first).getPosition(); labelString += " (" + blockSize/1000 + " kb)"; } g2.drawString(labelString, left+first*boxSize-boxSize/2+TEXT_GAP, top-boxSize/3); } } g2.setStroke(thickerStroke); //see if the user has right-clicked to popup some marker info if(popupExists){ int smallDatasetSlopH = 0; int smallDatasetSlopV = 0; if (pref.getWidth() < visRect.width){ //dumb bug where little datasets popup the box in the wrong place smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2; smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2; } g2.setColor(Color.white); g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH, popupDrawRect.y+1-smallDatasetSlopV, popupDrawRect.width-1, popupDrawRect.height-1); g2.setColor(Color.black); g2.drawRect(popupDrawRect.x-smallDatasetSlopH, popupDrawRect.y-smallDatasetSlopV, popupDrawRect.width, popupDrawRect.height); for (int x = 0; x < displayStrings.length; x++){ g.drawString(displayStrings[x],popupDrawRect.x + popupLeftMargin-smallDatasetSlopH, popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV); } } if (showWM && !forExport){ //dataset is big enough to require worldmap final int WM_BD_GAP = 1; final int WM_BD_HEIGHT = 2; final int WM_BD_TOTAL = WM_BD_HEIGHT + 2*WM_BD_GAP; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); if (wmMaxWidth == 0){ wmMaxWidth = visRect.width/3; } double scalefactor; scalefactor = (double)(chartSize.width)/wmMaxWidth; double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth-WM_BD_TOTAL))); if (noImage){ //first time through draw a worldmap if dataset is big: worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2+WM_BD_TOTAL, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = 
(Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1); //make a pretty border gw2.setColor(Color.black); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight()); wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(), worldmap.getHeight()); float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < dPrimeTable.length-1; x++){ for (int y = x+1; y < dPrimeTable.length; y++){ if (dPrimeTable[x][y] == null){ continue; } double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left; double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top + WM_BD_TOTAL; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable[x][y].getColor()); gw2.fill(gp); } } noImage = false; } //draw block display in worldmap Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(wmBorder.getBorderInsets(this).left, wmBorder.getBorderInsets(this).top+WM_BD_GAP, wmInteriorRect.width, WM_BD_HEIGHT); gw2.setColor(Color.black); even = true; for (int i = 0; i < blocks.size(); i++){ int first = ((int[])blocks.elementAt(i))[0]; int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1]; int voffset; if (even){ voffset = 0; }else{ voffset = WM_BD_HEIGHT/2; } gw2.fillRect(wmBorder.getBorderInsets(this).left+(int)(prefBoxSize*first), wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP, (int)((last-first+1)*prefBoxSize), WM_BD_HEIGHT/2); even = !even; } wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2, visRect.y + visRect.height - worldmap.getHeight(), (worldmap.getWidth()-wmInteriorRect.width)/2, (worldmap.getHeight() -wmInteriorRect.height)/2); g2.drawImage(worldmap,visRect.x, visRect.y + visRect.height - worldmap.getHeight(), this); wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width); wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() + (worldmap.getHeight() - wmInteriorRect.height); //draw the outline of the viewport g2.setColor(Color.black); double hRatio = wmInteriorRect.getWidth()/pref.getWidth(); double vRatio = wmInteriorRect.getHeight()/pref.getHeight(); int hBump = worldmap.getWidth()-wmInteriorRect.width; int vBump = worldmap.getHeight()-wmInteriorRect.height; //bump a few pixels to avoid drawing on the border g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x, (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()), (int)(visRect.width*hRatio), (int)(visRect.height*vRatio)); } //see if we're drawing a worldmap resize rect if (resizeRectExists){ g2.setColor(Color.black); g2.drawRect(resizeWMRect.x, resizeWMRect.y, resizeWMRect.width, resizeWMRect.height); } //see if we're drawing a block selector rect if (blockRectExists){ g2.setColor(Color.black); g2.setStroke(dashedThinStroke); g2.drawRect(blockRect.x, blockRect.y, blockRect.width, blockRect.height); } }
(worldmap.getHeight() - wmInteriorRect.height);
(worldmap.getHeight() - wmInteriorRect.height)/2;
public void paintComponent(Graphics g){ PairwiseLinkage[][] dPrimeTable = theData.filteredDPrimeTable; Vector blocks = theData.blocks; Rectangle visRect = getVisibleRect(); //deal with zooming if (chartSize.getWidth() > (3*visRect.width)){ showWM = true; }else{ showWM = false; } if (zoomLevel == 0){ printDetails = true; } else{ printDetails = false; } Graphics2D g2 = (Graphics2D) g; Dimension size = getSize(); Dimension pref = getPreferredSize(); if (size.height < pref.height){ setSize(pref); } //okay so this dumb if block is to prevent the ugly repainting //bug when loading markers after the data are already being displayed, //results in a little off-centering for small datasets, but not too bad. if (!forExport){ if (!theData.infoKnown){ g2.translate((size.width - pref.width) / 2, (size.height - pref.height) / 2); } else { g2.translate((size.width - pref.width) / 2, 0); } } FontMetrics boxFontMetrics = g2.getFontMetrics(boxFont); int diamondX[] = new int[4]; int diamondY[] = new int[4]; Polygon diamond; left = H_BORDER; top = V_BORDER; if (forExport){ left -= exportStart * boxSize; } FontMetrics metrics; int ascent; g2.setColor(BG_GREY); g2.fillRect(0,0,pref.width,pref.height); g2.setColor(Color.black); g2.setFont(boldMarkerNameFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); //TODO: finish implementing scaling gizmo /*//deal with adding some space to better display data with large gaps int cumulativeGap[] = new int[Chromosome.getFilteredSize()]; for (int i = 0; i < cumulativeGap.length; i++){ cumulativeGap[i] = 0; } if (theData.infoKnown){ double mean = (((SNP)Chromosome.markers[Chromosome.markers.length-1]).getPosition() - ((SNP)Chromosome.markers[0]).getPosition())/Chromosome.markers.length-1; for (int i = 1; i < cumulativeGap.length; i++){ double sep = Chromosome.getMarker(i).getPosition() - Chromosome.getMarker(i-1).getPosition(); if (sep > mean*10){ cumulativeGap[i] = cumulativeGap[i-1] + (int)(sep/mean)*4; }else{ cumulativeGap[i] = cumulativeGap[i-1]; } } } */ //the following values are the bounds on the boxes we want to //display given that the current window is 'visRect' lowX = (visRect.x-clickXShift-(visRect.y + visRect.height-clickYShift))/boxSize; if (lowX < 0) { lowX = 0; } highX = ((visRect.x + visRect.width)/boxSize)+1; if (highX > dPrimeTable.length-1){ highX = dPrimeTable.length-1; } lowY = ((visRect.x-clickXShift)+(visRect.y-clickYShift))/boxSize; if (lowY < lowX+1){ lowY = lowX+1; } highY = (((visRect.x-clickXShift+visRect.width) + (visRect.y-clickYShift+visRect.height))/boxSize)+1; if (highY > dPrimeTable.length){ highY = dPrimeTable.length; } if (forExport){ lowX = exportStart; lowY = exportStart; highX = exportStop; highY = exportStop; } if (theData.infoKnown) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); //// draw the marker locations int lineSpan = (dPrimeTable.length-1) * boxSize; long minpos = Chromosome.getMarker(0).getPosition(); long maxpos = Chromosome.getMarker(Chromosome.getSize()-1).getPosition(); double spanpos = maxpos - minpos; g2.setStroke(thinnerStroke); g2.setColor(Color.white); g2.fillRect(left+1, top+1, lineSpan-1, TICK_HEIGHT-1); g2.setColor(Color.black); g2.drawRect(left, top, lineSpan, TICK_HEIGHT); for (int i = 0; i < Chromosome.getFilteredSize(); i++){ double pos = (Chromosome.getFilteredMarker(i).getPosition() - minpos) / spanpos; int xx = (int) (left + lineSpan*pos); g2.setStroke(thickerStroke); g2.drawLine(xx, top, xx, top + TICK_HEIGHT); g2.setStroke(thinnerStroke); 
g2.drawLine(xx, top + TICK_HEIGHT, left + i*boxSize, top+TICK_BOTTOM); } top += TICK_BOTTOM + TICK_HEIGHT; //// draw the marker names if (printDetails){ widestMarkerName = metrics.stringWidth(Chromosome.getFilteredMarker(0).getName()); for (int x = 1; x < dPrimeTable.length; x++) { int thiswide = metrics.stringWidth(Chromosome.getFilteredMarker(x).getName()); if (thiswide > widestMarkerName) widestMarkerName = thiswide; } g2.translate(left, top + widestMarkerName); g2.rotate(-Math.PI / 2.0); for (int x = 0; x < dPrimeTable.length; x++) { if (theData.isInBlock[x]){ g2.setFont(boldMarkerNameFont); }else{ g2.setFont(markerNameFont); } g2.drawString(Chromosome.getFilteredMarker(x).getName(),TEXT_GAP, x*boxSize + ascent/3); } g2.rotate(Math.PI / 2.0); g2.translate(-left, -(top + widestMarkerName)); // move everybody down top += widestMarkerName + TEXT_GAP; } g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } top += blockDispHeight; //// draw the marker numbers if (printDetails){ g2.setFont(markerNumFont); metrics = g2.getFontMetrics(); ascent = metrics.getAscent(); for (int x = 0; x < dPrimeTable.length; x++) { String mark = String.valueOf(Chromosome.realIndex[x] + 1); g2.drawString(mark, left + x*boxSize - metrics.stringWidth(mark)/2, top + ascent); } top += boxRadius/2; // give a little space between numbers and boxes } //clickxshift and clickyshift are used later to translate from x,y coords //to the pair of markers comparison at those coords if (!(theData.infoKnown)){ clickXShift = left + (size.width-pref.width)/2; clickYShift = top + (size.height - pref.height)/2; } else { clickXShift = left + (size.width-pref.width)/2; clickYShift = top; } // draw table column by column for (int x = lowX; x < highX; x++) { //always draw the fewest possible boxes if (lowY < x+1){ lowY = x+1; } for (int y = lowY; y < highY; y++) { if (dPrimeTable[x][y] == null){ continue; } double d = dPrimeTable[x][y].getDPrime(); //double l = dPrimeTable[x][y].getLOD(); Color boxColor = dPrimeTable[x][y].getColor(); // draw markers above int xx = left + (x + y) * boxSize / 2; int yy = top + (y - x) * boxSize / 2; diamondX[0] = xx; diamondY[0] = yy - boxRadius; diamondX[1] = xx + boxRadius; diamondY[1] = yy; diamondX[2] = xx; diamondY[2] = yy + boxRadius; diamondX[3] = xx - boxRadius; diamondY[3] = yy; diamond = new Polygon(diamondX, diamondY, 4); g2.setColor(boxColor); g2.fillPolygon(diamond); if(printDetails){ g2.setFont(boxFont); ascent = boxFontMetrics.getAscent(); int val = (int) (d * 100); g2.setColor((val < 50) ? 
Color.gray : Color.black); if (boxColor == Color.darkGray){ g2.setColor(Color.white); } if (val != 100) { String valu = String.valueOf(val); int widf = boxFontMetrics.stringWidth(valu); g.drawString(valu, xx - widf/2, yy + ascent/2); } } } } boolean even = true; //highlight blocks g2.setFont(markerNameFont); ascent = g2.getFontMetrics().getAscent(); //g.setColor(new Color(153,255,153)); g2.setColor(Color.black); //g.setColor(new Color(51,153,51)); for (int i = 0; i < blocks.size(); i++){ int[] theBlock = (int[])blocks.elementAt(i); int first = theBlock[0]; int last = theBlock[theBlock.length-1]; //big vee around whole thing g2.setStroke(fatStroke); g2.drawLine(left + (2*first) * boxSize/2 - boxRadius, top, left + (first + last) * boxSize/2, top + (last - first) * boxSize/2 + boxRadius); g2.drawLine(left + (first + last) * boxSize/2, top + (last - first) * boxSize/2 + boxRadius, left + (2*last) * boxSize/2+boxRadius, top); for (int j = first; j <= last; j++){ if (theData.isInBlock[j]){ g2.setStroke(fatStroke); }else{ g2.setStroke(dashedFatStroke); } g2.drawLine(left+j*boxSize-boxSize/2, top-blockDispHeight, left+j*boxSize+boxSize/2, top-blockDispHeight); } //lines to connect to block display g2.setStroke(fatStroke); g2.drawLine(left + first*boxSize-boxSize/2, top-1, left+first*boxSize-boxSize/2, top-blockDispHeight); g2.drawLine(left+last*boxSize+boxSize/2, top-1, left+last*boxSize+boxSize/2, top-blockDispHeight); if (printDetails){ String labelString = new String ("Block " + (i+1)); if (theData.infoKnown){ long blockSize = Chromosome.getMarker(last).getPosition() - Chromosome.getMarker(first).getPosition(); labelString += " (" + blockSize/1000 + " kb)"; } g2.drawString(labelString, left+first*boxSize-boxSize/2+TEXT_GAP, top-boxSize/3); } } g2.setStroke(thickerStroke); //see if the user has right-clicked to popup some marker info if(popupExists){ int smallDatasetSlopH = 0; int smallDatasetSlopV = 0; if (pref.getWidth() < visRect.width){ //dumb bug where little datasets popup the box in the wrong place smallDatasetSlopH = (int)(visRect.width - pref.getWidth())/2; smallDatasetSlopV = (int)(visRect.height - pref.getHeight())/2; } g2.setColor(Color.white); g2.fillRect(popupDrawRect.x+1-smallDatasetSlopH, popupDrawRect.y+1-smallDatasetSlopV, popupDrawRect.width-1, popupDrawRect.height-1); g2.setColor(Color.black); g2.drawRect(popupDrawRect.x-smallDatasetSlopH, popupDrawRect.y-smallDatasetSlopV, popupDrawRect.width, popupDrawRect.height); for (int x = 0; x < displayStrings.length; x++){ g.drawString(displayStrings[x],popupDrawRect.x + popupLeftMargin-smallDatasetSlopH, popupDrawRect.y+((x+1)*metrics.getHeight())-smallDatasetSlopV); } } if (showWM && !forExport){ //dataset is big enough to require worldmap final int WM_BD_GAP = 1; final int WM_BD_HEIGHT = 2; final int WM_BD_TOTAL = WM_BD_HEIGHT + 2*WM_BD_GAP; CompoundBorder wmBorder = new CompoundBorder(BorderFactory.createRaisedBevelBorder(), BorderFactory.createLoweredBevelBorder()); if (wmMaxWidth == 0){ wmMaxWidth = visRect.width/3; } double scalefactor; scalefactor = (double)(chartSize.width)/wmMaxWidth; double prefBoxSize = boxSize/(scalefactor*((double)wmMaxWidth/(double)(wmMaxWidth-WM_BD_TOTAL))); if (noImage){ //first time through draw a worldmap if dataset is big: worldmap = new BufferedImage((int)(chartSize.width/scalefactor)+wmBorder.getBorderInsets(this).left*2, (int)(chartSize.height/scalefactor)+wmBorder.getBorderInsets(this).top*2+WM_BD_TOTAL, BufferedImage.TYPE_3BYTE_BGR); Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = 
(Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(1,1,worldmap.getWidth()-1,worldmap.getHeight()-1); //make a pretty border gw2.setColor(Color.black); wmBorder.paintBorder(this,gw2,0,0,worldmap.getWidth(),worldmap.getHeight()); wmInteriorRect = wmBorder.getInteriorRectangle(this,0,0,worldmap.getWidth(), worldmap.getHeight()); float[] smallDiamondX = new float[4]; float[] smallDiamondY = new float[4]; GeneralPath gp; for (int x = 0; x < dPrimeTable.length-1; x++){ for (int y = x+1; y < dPrimeTable.length; y++){ if (dPrimeTable[x][y] == null){ continue; } double xx = (x + y)*prefBoxSize/2+wmBorder.getBorderInsets(this).left; double yy = (y - x)*prefBoxSize/2+wmBorder.getBorderInsets(this).top + WM_BD_TOTAL; smallDiamondX[0] = (float)xx; smallDiamondY[0] = (float)(yy - prefBoxSize/2); smallDiamondX[1] = (float)(xx + prefBoxSize/2); smallDiamondY[1] = (float)yy; smallDiamondX[2] = (float)xx; smallDiamondY[2] = (float)(yy + prefBoxSize/2); smallDiamondX[3] = (float)(xx - prefBoxSize/2); smallDiamondY[3] = (float)yy; gp = new GeneralPath(GeneralPath.WIND_EVEN_ODD, smallDiamondX.length); gp.moveTo(smallDiamondX[0],smallDiamondY[0]); for (int i = 1; i < smallDiamondX.length; i++){ gp.lineTo(smallDiamondX[i], smallDiamondY[i]); } gp.closePath(); gw2.setColor(dPrimeTable[x][y].getColor()); gw2.fill(gp); } } noImage = false; } //draw block display in worldmap Graphics gw = worldmap.getGraphics(); Graphics2D gw2 = (Graphics2D)(gw); gw2.setColor(BG_GREY); gw2.fillRect(wmBorder.getBorderInsets(this).left, wmBorder.getBorderInsets(this).top+WM_BD_GAP, wmInteriorRect.width, WM_BD_HEIGHT); gw2.setColor(Color.black); even = true; for (int i = 0; i < blocks.size(); i++){ int first = ((int[])blocks.elementAt(i))[0]; int last = ((int[])blocks.elementAt(i))[((int[])blocks.elementAt(i)).length-1]; int voffset; if (even){ voffset = 0; }else{ voffset = WM_BD_HEIGHT/2; } gw2.fillRect(wmBorder.getBorderInsets(this).left+(int)(prefBoxSize*first), wmBorder.getBorderInsets(this).top+voffset+WM_BD_GAP, (int)((last-first+1)*prefBoxSize), WM_BD_HEIGHT/2); even = !even; } wmResizeCorner = new Rectangle(visRect.x + worldmap.getWidth() - (worldmap.getWidth()-wmInteriorRect.width)/2, visRect.y + visRect.height - worldmap.getHeight(), (worldmap.getWidth()-wmInteriorRect.width)/2, (worldmap.getHeight() -wmInteriorRect.height)/2); g2.drawImage(worldmap,visRect.x, visRect.y + visRect.height - worldmap.getHeight(), this); wmInteriorRect.x = visRect.x + (worldmap.getWidth() - wmInteriorRect.width); wmInteriorRect.y = visRect.y+visRect.height-worldmap.getHeight() + (worldmap.getHeight() - wmInteriorRect.height); //draw the outline of the viewport g2.setColor(Color.black); double hRatio = wmInteriorRect.getWidth()/pref.getWidth(); double vRatio = wmInteriorRect.getHeight()/pref.getHeight(); int hBump = worldmap.getWidth()-wmInteriorRect.width; int vBump = worldmap.getHeight()-wmInteriorRect.height; //bump a few pixels to avoid drawing on the border g2.drawRect((int)(visRect.x*hRatio)+hBump/2+visRect.x, (int)(visRect.y*vRatio)+vBump/2+(visRect.y + visRect.height - worldmap.getHeight()), (int)(visRect.width*hRatio), (int)(visRect.height*vRatio)); } //see if we're drawing a worldmap resize rect if (resizeRectExists){ g2.setColor(Color.black); g2.drawRect(resizeWMRect.x, resizeWMRect.y, resizeWMRect.width, resizeWMRect.height); } //see if we're drawing a block selector rect if (blockRectExists){ g2.setColor(Color.black); g2.setStroke(dashedThinStroke); g2.drawRect(blockRect.x, blockRect.y, blockRect.width, blockRect.height); } }
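The two wmInteriorRect fixes above add the missing division by two when offsetting the interior rectangle from the worldmap's edge: the bevel border is drawn on both sides, so the interior is shifted by half the total border thickness on each axis, not the full amount. A minimal standalone sketch of that centering arithmetic (InteriorRectDemo and positionInterior are illustrative names, not from the source):

import java.awt.Rectangle;

public class InteriorRectDemo {
    static Rectangle positionInterior(Rectangle outer, int innerWidth, int innerHeight) {
        // the border occupies (outer - inner) in total, split evenly between the two sides
        int x = outer.x + (outer.width - innerWidth) / 2;
        int y = outer.y + (outer.height - innerHeight) / 2;
        return new Rectangle(x, y, innerWidth, innerHeight);
    }

    public static void main(String[] args) {
        Rectangle outer = new Rectangle(0, 0, 120, 80);
        // without the /2 the interior would be offset by the full border width
        // and sit flush against the far edge instead of being centered
        System.out.println(positionInterior(outer, 100, 60)); // x=10, y=10, width=100, height=60
    }
}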
setFrequencyCutoff(0);
filterByFrequency(0);
public MarkerAssociationResult(Haplotype[] locusHaplos, String n, SNP snp) { nf.setGroupingUsed(false); for (int i = 0; i < locusHaplos.length; i++){ alleles.add(locusHaplos[i]); } setFrequencyCutoff(0); name = n; this.snp = snp; }
} catch ( IOException e ) {
} catch ( Exception e ) {
protected void createThumbnail( VolumeBase volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Maximum size of the thumbnail int maxThumbWidth = 100; int maxThumbHeight = 100; checkCropBounds(); /* Determine the minimum size for the instance used for thumbnail creation to get decent image quality. The cropped portion of the image must be roughly the same resolution as the intended thumbnail. */ double cropWidth = cropMaxX - cropMinX; cropWidth = ( cropWidth > 0.000001 ) ? cropWidth : 1.0; double cropHeight = cropMaxY - cropMinY; cropHeight = ( cropHeight > 0.000001 ) ? cropHeight : 1.0; int minInstanceWidth = (int)(((double)maxThumbWidth)/cropWidth); int minInstanceHeight = (int)(((double)maxThumbHeight)/cropHeight); int minInstanceSide = Math.max( minInstanceWidth, minInstanceHeight ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); /* We try to ensure that the thumbnail is actually from the original image by comparing aspect ratio of it to original. This is not a perfect check but it will usually catch the most typical errors (like having a the original rotated by RAW conversion SW but still the original EXIF thumbnail. */ double origAspect = this.getAspect( original.getWidth(), original.getHeight(), 1.0 ); double aspectAccuracy = 0.01; // First, check if there is a thumbnail in image header BufferedImage origImage = readExifThumbnail( original.getImageFile() ); if ( origImage == null || !isOkForThumbCreation( origImage.getWidth(), origImage.getHeight(), minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // Read the image try { File imageFile = original.getImageFile(); String fname = imageFile.getName(); int lastDotPos = fname.lastIndexOf( "." 
); if ( lastDotPos <= 0 || lastDotPos >= fname.length()-1 ) { throw new IOException( "Cannot determine file type extension of " + imageFile.getAbsolutePath() ); } String suffix = fname.substring( lastDotPos+1 ); Iterator readers = ImageIO.getImageReadersBySuffix( suffix ); if ( !readers.hasNext() ) { throw new IOException( "Unknown image file extension " + suffix + "\nwhile reading " + imageFile.getAbsolutePath() ); } if ( readers.hasNext() ) { ImageReader reader = (ImageReader)readers.next(); log.debug( "Creating stream" ); ImageInputStream iis = ImageIO.createImageInputStream( original.getImageFile() ); reader.setInput( iis, false, false ); int numThumbs = 0; try { int numImages = reader.getNumImages( true ); numThumbs = reader.getNumThumbnails(0); } catch (IOException ex) { ex.printStackTrace(); } if ( numThumbs > 0 && isOkForThumbCreation( reader.getThumbnailWidth( 0, 0 ), reader.getThumbnailHeight( 0, 0 ) , minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // There is a thumbanil that is big enough - use it log.debug( "Original has thumbnail, size " + reader.getThumbnailWidth( 0, 0 ) + " x " + reader.getThumbnailHeight( 0, 0 ) ); origImage = reader.readThumbnail( 0, 0 ); log.debug( "Read thumbnail" ); } else { log.debug( "No thumbnail in original" ); ImageReadParam param = reader.getDefaultReadParam(); // Find the maximum subsampling rate we can still use for creating // a quality thumbnail. Some image format readers seem to have // problems with subsampling values (e.g. PNG sometimes crashed // the whole virtual machine, to for now let's do this only // with JPG. int subsampling = 1; if ( suffix.toLowerCase().equals( "jpg" ) ) { int minDim = Math.min( reader.getWidth( 0 ),reader.getHeight( 0 ) ); while ( 2 * minInstanceSide * subsampling < minDim ) { subsampling *= 2; } } param.setSourceSubsampling( subsampling, subsampling, 0, 0 ); origImage = reader.read( 0, param ); // Image image = JAI.create( "fileload", original.getImageFile() ); log.debug( "Read original" ); } iis.close(); } } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); AffineTransform xform = org.photovault.image.ImageXform.getRotateXform( prefRotation -original.getRotated(), origWidth, origHeight ); ParameterBlockJAI rotParams = new ParameterBlockJAI( "affine" ); rotParams.addSource( origImage ); rotParams.setParameter( "transform", xform ); rotParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); RenderedOp rotatedImage = JAI.create( "affine", rotParams ); ParameterBlockJAI cropParams = new ParameterBlockJAI( "crop" ); cropParams.addSource( rotatedImage ); float cropX = (float)( Math.rint( rotatedImage.getMinX() + cropMinX * rotatedImage.getWidth() ) ); float cropY = (float)( Math.rint( rotatedImage.getMinY() + cropMinY * rotatedImage.getHeight())); float cropW = (float)( Math.rint((cropWidth) * rotatedImage.getWidth() ) ); float cropH = (float) ( Math.rint((cropHeight) * rotatedImage.getHeight() )); cropParams.setParameter( "x", cropX ); cropParams.setParameter( "y", cropY ); cropParams.setParameter( "width", cropW ); 
cropParams.setParameter( "height", cropH ); RenderedOp cropped = JAI.create("crop", cropParams, null); // Translate the image so that it begins in origo ParameterBlockJAI pbXlate = new ParameterBlockJAI( "translate" ); pbXlate.addSource( cropped ); pbXlate.setParameter( "xTrans", (float) (-cropped.getMinX() ) ); pbXlate.setParameter( "yTrans", (float) (-cropped.getMinY() ) ); RenderedOp xformImage = JAI.create( "translate", pbXlate ); // Finally, scale this to thumbnail AffineTransform thumbScale = org.photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, 0, xformImage.getWidth(), xformImage.getHeight() ); ParameterBlockJAI thumbScaleParams = new ParameterBlockJAI( "affine" ); thumbScaleParams.addSource( xformImage ); thumbScaleParams.setParameter( "transform", thumbScale ); thumbScaleParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); PlanarImage thumbImage = JAI.create( "affine", thumbScaleParams ); // Save it FileOutputStream out = null; try { out = new FileOutputStream(thumbnailFile.getAbsolutePath()); } catch(IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); // TODO: If we abort here due to image writing problem we will have // problems later with non-existing transaction. We should really // rethink the error handling login in the whole function. Anyway, we // haven't changed anything yet so we can safely commit the tx. txw.commit(); return; } String logStr = "Creating thumbnail for " + original.getImageFile().getAbsolutePath() + "\n" + "# bands: " + thumbImage.getNumBands(); for ( int band = 0; band < thumbImage.getNumBands(); band++ ) { logStr = logStr + "\nBand " + band + " size: " + thumbImage.getSampleModel().getSampleSize( band ); } log.debug( logStr ); if ( thumbImage.getSampleModel().getSampleSize( 0 ) == 16 ) { double[] subtract = new double[1]; subtract[0] = 0; double[] divide = new double[1]; divide[0] = 1./256.; // Now we can rescale the pixels gray levels: ParameterBlock pbRescale = new ParameterBlock(); pbRescale.add(divide); pbRescale.add(subtract); pbRescale.addSource( thumbImage ); PlanarImage outputImage = (PlanarImage)JAI.create("rescale", pbRescale, null); // Make sure it is a byte image - force conversion. ParameterBlock pbConvert = new ParameterBlock(); pbConvert.addSource(outputImage); pbConvert.add(DataBuffer.TYPE_BYTE); thumbImage = JAI.create("format", pbConvert); } JPEGEncodeParam encodeParam = new JPEGEncodeParam(); ImageEncoder encoder = ImageCodec.createImageEncoder("JPEG", out, encodeParam); try { encoder.encode( thumbImage ); out.close(); // origImage.dispose(); thumbImage.dispose(); } catch (Exception e) { log.error( "Error writing thumbnail for " + original.getImageFile().getAbsolutePath()+ ": " + e.getMessage() ); // TODO: If we abort here due to image writing problem we will have // problems later with non-existing transaction. We should really // rethink the error handling login in the whole function. Anyway, we // haven't changed anything yet so we can safely commit the tx. txw.commit();// txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); thumbInstance.setCropBounds( getCropBounds() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); oldThumbnail = null; log.debug( "Thumbnail loaded" ); txw.commit(); }
txw.abort();
txw.commit();
protected void createThumbnail( VolumeBase volume ) { log.debug( "Creating thumbnail for " + uid ); ODMGXAWrapper txw = new ODMGXAWrapper(); txw.lock( this, Transaction.WRITE ); // Maximum size of the thumbnail int maxThumbWidth = 100; int maxThumbHeight = 100; checkCropBounds(); /* Determine the minimum size for the instance used for thumbnail creation to get decent image quality. The cropped portion of the image must be roughly the same resolution as the intended thumbnail. */ double cropWidth = cropMaxX - cropMinX; cropWidth = ( cropWidth > 0.000001 ) ? cropWidth : 1.0; double cropHeight = cropMaxY - cropMinY; cropHeight = ( cropHeight > 0.000001 ) ? cropHeight : 1.0; int minInstanceWidth = (int)(((double)maxThumbWidth)/cropWidth); int minInstanceHeight = (int)(((double)maxThumbHeight)/cropHeight); int minInstanceSide = Math.max( minInstanceWidth, minInstanceHeight ); // Find the original image to use as a staring point ImageInstance original = null; for ( int n = 0; n < instances.size(); n++ ) { ImageInstance instance = (ImageInstance) instances.get( n ); if ( instance.getInstanceType() == ImageInstance.INSTANCE_TYPE_ORIGINAL ) { original = instance; txw.lock( original, Transaction.READ ); break; } } if ( original == null || original.getImageFile() == null || !original.getImageFile().exists() ) { // If there are uncorrupted instances, no thumbnail can be created log.warn( "Error - no original image was found!!!" ); txw.commit(); return; } log.debug( "Found original, reading it..." ); /* We try to ensure that the thumbnail is actually from the original image by comparing aspect ratio of it to original. This is not a perfect check but it will usually catch the most typical errors (like having a the original rotated by RAW conversion SW but still the original EXIF thumbnail. */ double origAspect = this.getAspect( original.getWidth(), original.getHeight(), 1.0 ); double aspectAccuracy = 0.01; // First, check if there is a thumbnail in image header BufferedImage origImage = readExifThumbnail( original.getImageFile() ); if ( origImage == null || !isOkForThumbCreation( origImage.getWidth(), origImage.getHeight(), minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // Read the image try { File imageFile = original.getImageFile(); String fname = imageFile.getName(); int lastDotPos = fname.lastIndexOf( "." 
); if ( lastDotPos <= 0 || lastDotPos >= fname.length()-1 ) { throw new IOException( "Cannot determine file type extension of " + imageFile.getAbsolutePath() ); } String suffix = fname.substring( lastDotPos+1 ); Iterator readers = ImageIO.getImageReadersBySuffix( suffix ); if ( !readers.hasNext() ) { throw new IOException( "Unknown image file extension " + suffix + "\nwhile reading " + imageFile.getAbsolutePath() ); } if ( readers.hasNext() ) { ImageReader reader = (ImageReader)readers.next(); log.debug( "Creating stream" ); ImageInputStream iis = ImageIO.createImageInputStream( original.getImageFile() ); reader.setInput( iis, false, false ); int numThumbs = 0; try { int numImages = reader.getNumImages( true ); numThumbs = reader.getNumThumbnails(0); } catch (IOException ex) { ex.printStackTrace(); } if ( numThumbs > 0 && isOkForThumbCreation( reader.getThumbnailWidth( 0, 0 ), reader.getThumbnailHeight( 0, 0 ) , minInstanceWidth, minInstanceHeight, origAspect, aspectAccuracy ) ) { // There is a thumbanil that is big enough - use it log.debug( "Original has thumbnail, size " + reader.getThumbnailWidth( 0, 0 ) + " x " + reader.getThumbnailHeight( 0, 0 ) ); origImage = reader.readThumbnail( 0, 0 ); log.debug( "Read thumbnail" ); } else { log.debug( "No thumbnail in original" ); ImageReadParam param = reader.getDefaultReadParam(); // Find the maximum subsampling rate we can still use for creating // a quality thumbnail. Some image format readers seem to have // problems with subsampling values (e.g. PNG sometimes crashed // the whole virtual machine, to for now let's do this only // with JPG. int subsampling = 1; if ( suffix.toLowerCase().equals( "jpg" ) ) { int minDim = Math.min( reader.getWidth( 0 ),reader.getHeight( 0 ) ); while ( 2 * minInstanceSide * subsampling < minDim ) { subsampling *= 2; } } param.setSourceSubsampling( subsampling, subsampling, 0, 0 ); origImage = reader.read( 0, param ); // Image image = JAI.create( "fileload", original.getImageFile() ); log.debug( "Read original" ); } iis.close(); } } catch ( IOException e ) { log.warn( "Error reading image: " + e.getMessage() ); txw.abort(); return; } } log.debug( "Done, finding name" ); // Find where to store the file in the target volume File thumbnailFile = volume.getInstanceName( this, "jpg" ); log.debug( "name = " + thumbnailFile.getName() ); // Shrink the image to desired state and save it // Find first the correct transformation for doing this int origWidth = origImage.getWidth(); int origHeight = origImage.getHeight(); AffineTransform xform = org.photovault.image.ImageXform.getRotateXform( prefRotation -original.getRotated(), origWidth, origHeight ); ParameterBlockJAI rotParams = new ParameterBlockJAI( "affine" ); rotParams.addSource( origImage ); rotParams.setParameter( "transform", xform ); rotParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); RenderedOp rotatedImage = JAI.create( "affine", rotParams ); ParameterBlockJAI cropParams = new ParameterBlockJAI( "crop" ); cropParams.addSource( rotatedImage ); float cropX = (float)( Math.rint( rotatedImage.getMinX() + cropMinX * rotatedImage.getWidth() ) ); float cropY = (float)( Math.rint( rotatedImage.getMinY() + cropMinY * rotatedImage.getHeight())); float cropW = (float)( Math.rint((cropWidth) * rotatedImage.getWidth() ) ); float cropH = (float) ( Math.rint((cropHeight) * rotatedImage.getHeight() )); cropParams.setParameter( "x", cropX ); cropParams.setParameter( "y", cropY ); cropParams.setParameter( "width", cropW ); 
cropParams.setParameter( "height", cropH ); RenderedOp cropped = JAI.create("crop", cropParams, null); // Translate the image so that it begins in origo ParameterBlockJAI pbXlate = new ParameterBlockJAI( "translate" ); pbXlate.addSource( cropped ); pbXlate.setParameter( "xTrans", (float) (-cropped.getMinX() ) ); pbXlate.setParameter( "yTrans", (float) (-cropped.getMinY() ) ); RenderedOp xformImage = JAI.create( "translate", pbXlate ); // Finally, scale this to thumbnail AffineTransform thumbScale = org.photovault.image.ImageXform.getFittingXform( maxThumbWidth, maxThumbHeight, 0, xformImage.getWidth(), xformImage.getHeight() ); ParameterBlockJAI thumbScaleParams = new ParameterBlockJAI( "affine" ); thumbScaleParams.addSource( xformImage ); thumbScaleParams.setParameter( "transform", thumbScale ); thumbScaleParams.setParameter( "interpolation", Interpolation.getInstance( Interpolation.INTERP_NEAREST ) ); PlanarImage thumbImage = JAI.create( "affine", thumbScaleParams ); // Save it FileOutputStream out = null; try { out = new FileOutputStream(thumbnailFile.getAbsolutePath()); } catch(IOException e) { log.error( "Error writing thumbnail: " + e.getMessage() ); // TODO: If we abort here due to image writing problem we will have // problems later with non-existing transaction. We should really // rethink the error handling login in the whole function. Anyway, we // haven't changed anything yet so we can safely commit the tx. txw.commit(); return; } String logStr = "Creating thumbnail for " + original.getImageFile().getAbsolutePath() + "\n" + "# bands: " + thumbImage.getNumBands(); for ( int band = 0; band < thumbImage.getNumBands(); band++ ) { logStr = logStr + "\nBand " + band + " size: " + thumbImage.getSampleModel().getSampleSize( band ); } log.debug( logStr ); if ( thumbImage.getSampleModel().getSampleSize( 0 ) == 16 ) { double[] subtract = new double[1]; subtract[0] = 0; double[] divide = new double[1]; divide[0] = 1./256.; // Now we can rescale the pixels gray levels: ParameterBlock pbRescale = new ParameterBlock(); pbRescale.add(divide); pbRescale.add(subtract); pbRescale.addSource( thumbImage ); PlanarImage outputImage = (PlanarImage)JAI.create("rescale", pbRescale, null); // Make sure it is a byte image - force conversion. ParameterBlock pbConvert = new ParameterBlock(); pbConvert.addSource(outputImage); pbConvert.add(DataBuffer.TYPE_BYTE); thumbImage = JAI.create("format", pbConvert); } JPEGEncodeParam encodeParam = new JPEGEncodeParam(); ImageEncoder encoder = ImageCodec.createImageEncoder("JPEG", out, encodeParam); try { encoder.encode( thumbImage ); out.close(); // origImage.dispose(); thumbImage.dispose(); } catch (Exception e) { log.error( "Error writing thumbnail for " + original.getImageFile().getAbsolutePath()+ ": " + e.getMessage() ); // TODO: If we abort here due to image writing problem we will have // problems later with non-existing transaction. We should really // rethink the error handling login in the whole function. Anyway, we // haven't changed anything yet so we can safely commit the tx. txw.commit();// txw.abort(); return; } // add the created instance to this persistent object ImageInstance thumbInstance = addInstance( volume, thumbnailFile, ImageInstance.INSTANCE_TYPE_THUMBNAIL ); thumbInstance.setRotated( prefRotation -original.getRotated() ); thumbInstance.setCropBounds( getCropBounds() ); log.debug( "Loading thumbnail..." ); thumbnail = Thumbnail.createThumbnail( this, thumbnailFile ); oldThumbnail = null; log.debug( "Thumbnail loaded" ); txw.commit(); }
long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition();
long sep = Chromosome.getMarker(pos2).getPosition() - Chromosome.getMarker(pos1).getPosition();
void generateDPrimeTable(long maxdist){ //calculating D prime requires the number of each possible 2 marker //haplotype in the dataset dPrimeTable = new PairwiseLinkage[Chromosome.getSize()][Chromosome.getSize()]; int doublehet; long negMaxdist = -1*maxdist; int[][] twoMarkerHaplos = new int[3][3]; totalComps = (Chromosome.getSize()*(Chromosome.getSize()-1))/2; //System.out.println(totalComps); compsDone =0; //loop through all marker pairs for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ //clear the array for (int pos1 = 0; pos1 < pos2; pos1++){ compsDone++; long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } for (int i = 0; i < twoMarkerHaplos.length; i++){ for (int j = 0; j < twoMarkerHaplos[i].length; j++){ twoMarkerHaplos[i][j] = 0; } } doublehet = 0; //get the alleles for the markers int m1a1 = 0; int m1a2 = 0; int m2a1 = 0; int m2a2 = 0; int m1H = 0; int m2H = 0; for (int i = 0; i < chromosomes.size(); i++){ byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (m1a1 > 0){ if (m1a2 == 0 && !(a1 == 5) && !(a1 == 0) && a1 != m1a1) m1a2 = a1; } else if (!(a1 == 5) && !(a1 == 0)) m1a1=a1; if (m2a1 > 0){ if (m2a2 == 0 && !(a2 == 5) && !(a2 == 0) && a2 != m2a1) m2a2 = a2; } else if (!(a2 == 5) && !(a2 == 0)) m2a1=a2; if (a1 == 5) m1H++; if (a2 == 5) m2H++; } //check for non-polymorphic markers if (m1a2==0){ if (m1H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); continue; } else { if (m1a1 == 1){ m1a2=2; } else { m1a2 = 1; } } } if (m2a2==0){ if (m2H==0){ dPrimeTable[pos1][pos2] = null;//new PairwiseLinkage(0,0,0,0,0,nullArray); //TODO: look into these nulls (c.f. 
clean.ped adding in bad m's) continue; } else { if (m2a1 == 1){ m2a2=2; } else { m2a2 = 1; } } } int[] marker1num = new int[5]; int[] marker2num = new int[5]; marker1num[0]=0; marker1num[m1a1]=1; marker1num[m1a2]=2; marker2num[0]=0; marker2num[m2a1]=1; marker2num[m2a2]=2; //iterate through all chromosomes in dataset for (int i = 0; i < chromosomes.size(); i++){ //System.out.println(i + " " + pos1 + " " + pos2); //assign alleles for each of a pair of chromosomes at a marker to four variables byte a1 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos1); byte a2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); byte b1 = ((Chromosome) chromosomes.elementAt(++i)).getGenotype(pos1); byte b2 = ((Chromosome) chromosomes.elementAt(i)).getGenotype(pos2); if (a1 == 0 || a2 == 0 || b1 == 0 || b2 == 0){ //skip missing data } else if ((a1 == 5 && a2 == 5) || (a1 == 5 && !(a2 == b2)) || (a2 == 5 && !(a1 == b1))) doublehet++; //find doublehets and resolved haplotypes else if (a1 == 5){ twoMarkerHaplos[1][marker2num[a2]]++; twoMarkerHaplos[2][marker2num[a2]]++; } else if (a2 == 5){ twoMarkerHaplos[marker1num[a1]][1]++; twoMarkerHaplos[marker1num[a1]][2]++; } else { twoMarkerHaplos[marker1num[a1]][marker2num[a2]]++; twoMarkerHaplos[marker1num[b1]][marker2num[b2]]++; } } //another monomorphic marker check int r1, r2, c1, c2; r1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[1][2]; r2 = twoMarkerHaplos[2][1] + twoMarkerHaplos[2][2]; c1 = twoMarkerHaplos[1][1] + twoMarkerHaplos[2][1]; c2 = twoMarkerHaplos[1][2] + twoMarkerHaplos[2][2]; if ( (r1==0 || r2==0 || c1==0 || c2==0) && doublehet == 0){ dPrimeTable[pos1][pos2] = new PairwiseLinkage(1,0,0,0,0,new double[0]); continue; } //compute D Prime for this pair of markers. //return is a tab delimited string of d', lod, r^2, CI(low), CI(high) dPrimeTable[pos1][pos2] = computeDPrime(twoMarkerHaplos[1][1], twoMarkerHaplos[1][2], twoMarkerHaplos[2][1], twoMarkerHaplos[2][2], doublehet, 0.1); this.realCompsDone++; } } filteredDPrimeTable = getFilteredTable(); }
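The generateDPrimeTable change above flips the subtraction so sep is the later marker's position minus the earlier one's; assuming positions increase with marker index, that makes sep a non-negative physical distance, which reads cleanly against the maxdist threshold. A small self-contained sketch with a hypothetical positions array:

public class MarkerSeparationDemo {
    public static void main(String[] args) {
        long[] positions = {1000L, 4500L, 52000L}; // hypothetical marker positions in base pairs
        long maxdist = 20000L;
        for (int pos2 = 1; pos2 < positions.length; pos2++) {
            for (int pos1 = 0; pos1 < pos2; pos1++) {
                // later minus earlier: non-negative when positions are sorted ascending
                long sep = positions[pos2] - positions[pos1];
                boolean skipped = maxdist > 0 && sep > maxdist;
                System.out.println(pos1 + "-" + pos2 + " sep=" + sep + " skipped=" + skipped);
            }
        }
    }
}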
isInBlock[markers[j]] = true;
isInBlock[Chromosome.realIndex[markers[j]]] = true;
void guessBlocks(int method){ Vector returnVec = new Vector(); switch(method){ case 0: returnVec = FindBlocks.doSFS(filteredDPrimeTable); break; case 1: returnVec = FindBlocks.do4Gamete(filteredDPrimeTable,0.01); break; case 2: returnVec = FindBlocks.doMJD(filteredDPrimeTable); break; case 3: returnVec = new Vector();break; } blocks = returnVec; //keep track of which markers are in a block isInBlock = new boolean[Chromosome.getSize()]; for (int i = 0; i < isInBlock.length; i++){ isInBlock[i] = false; } for (int i = 0; i < blocks.size(); i++){ int[] markers = (int[])blocks.elementAt(i); for (int j = 0; j < markers.length; j++){ isInBlock[markers[j]] = true; } } }
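The guessBlocks fix above routes each block member through Chromosome.realIndex: blocks are found on the filtered D' table, so the filtered index must be mapped back to the original marker index before flagging isInBlock, which is sized to the full marker set. A minimal sketch with made-up arrays (realIndex, isInBlock, and block hold illustrative values, not the source's data):

public class RealIndexDemo {
    public static void main(String[] args) {
        int[] realIndex = {0, 2, 3, 5};       // filtered marker index -> original marker index
        boolean[] isInBlock = new boolean[6]; // sized to the full, unfiltered marker count
        int[] block = {1, 2};                 // a block expressed in filtered coordinates
        for (int j = 0; j < block.length; j++) {
            isInBlock[realIndex[block[j]]] = true; // flags original markers 2 and 3
        }
        for (int i = 0; i < isInBlock.length; i++) {
            System.out.println("marker " + i + " in block: " + isInBlock[i]);
        }
    }
}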
if (!even){ throw new HaploViewException("Genotype file appears to have an odd number of lines.\n"+ "Each individual is required to have two chromosomes"); }
void prepareHapsInput(File infile) throws IOException, HaploViewException{ //this method is called to suck in data from a file (its only argument) //of genotypes and sets up the Chromosome objects. String currentLine; Vector chroms = new Vector(); byte[] genos = new byte[0]; String ped, indiv; if(infile.length() < 1){ throw new HaploViewException("Genotype file is empty or does not exist: " + infile.getName()); } //read the file: BufferedReader in = new BufferedReader(new FileReader(infile)); int lineCount = 0; int numTokens = 0; while ((currentLine = in.readLine()) != null){ lineCount++; //each line is expected to be of the format: //ped indiv geno geno geno geno... StringTokenizer st = new StringTokenizer(currentLine); //first two tokens are expected to be ped, indiv ped = st.nextToken(); indiv = st.nextToken(); //all other tokens are loaded into a vector (they should all be genotypes) genos = new byte[st.countTokens()]; int q = 0; if (numTokens == 0){ numTokens = st.countTokens(); } if (numTokens != st.countTokens()){ throw new HaploViewException("Genotype file error:\nLine " + lineCount + " appears to have an incorrect number of entries"); } while (st.hasMoreTokens()){ String thisGenotype = (String)st.nextElement(); if (thisGenotype.equals("h")) { genos[q] = 5; }else{ try{ genos[q] = Byte.parseByte(thisGenotype); }catch (NumberFormatException nfe){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + thisGenotype + "\" on line " + lineCount + " not allowed."); } } if (genos[q] < 0 || genos[q] > 5){ throw new HaploViewException("Genotype file input error:\ngenotype value \"" + genos[q] + "\" on line " + lineCount + " not allowed."); } q++; } //a Chromosome is created and added to a vector of chromosomes. //this is what is evetually returned. chroms.add(new Chromosome(ped, indiv, genos, infile.getName())); } chromosomes = chroms; //initialize realIndex Chromosome.realIndex = new int[genos.length]; for (int i = 0; i < genos.length; i++){ Chromosome.realIndex[i] = i; } try{ prepareMarkerInput(null,0); }catch(HaploViewException e){ } }
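The added guard above rejects a genotype file with an odd number of rows, since each individual is expected to contribute exactly two chromosome lines. A minimal sketch of that validation, using a modulo check on the line count and a standard runtime exception in place of HaploViewException:

public class EvenLineCheckDemo {
    static void checkLineCount(int lineCount) {
        boolean even = (lineCount % 2 == 0);
        if (!even) {
            throw new IllegalArgumentException(
                    "Genotype file appears to have an odd number of lines; "
                    + "each individual is required to have two chromosomes");
        }
    }

    public static void main(String[] args) {
        checkLineCount(4); // two individuals, four chromosome rows: accepted
        checkLineCount(5); // odd row count: rejected
    }
}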
markerInfo.add(new SNP(String.valueOf(i+1), (i*3000), maf));
markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), maf));
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } //System.out.println(numa1 + " " + numa2); double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, Long.parseLong(loc), infile.getName(), maf)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*3000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
saveDprimeWriter.write((i+1) + "\t" + (j+1) + "\t" + dPrimeTable[i][j].toString() + "\t" + dist + "\n");
saveDprimeWriter.write(Chromosome.getFilteredMarker(i).getName() + "\t" + Chromosome.getFilteredMarker(j).getName() + "\t" + dPrimeTable[i][j].toString() + "\t" + dist + "\n");
public void saveDprimeToText(PairwiseLinkage[][] dPrimeTable, File dumpDprimeFile, boolean info) throws IOException{ FileWriter saveDprimeWriter = new FileWriter(dumpDprimeFile); if (info){ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\tDist\n"); long dist; for (int i = 0; i < dPrimeTable.length; i++){ for (int j = 0; j < dPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(dPrimeTable[i][j] != null) { dist = (Chromosome.getFilteredMarker(j)).getPosition() - (Chromosome.getFilteredMarker(i)).getPosition(); saveDprimeWriter.write((i+1) + "\t" + (j+1) + "\t" + dPrimeTable[i][j].toString() + "\t" + dist + "\n"); } } } } }else{ saveDprimeWriter.write("L1\tL2\tD'\tLOD\tr^2\tCIlow\tCIhi\n"); for (int i = 0; i < dPrimeTable.length; i++){ for (int j = 0; j < dPrimeTable[i].length; j++){ //many "slots" in table aren't filled in because it is a 1/2 matrix if (i < j){ if(dPrimeTable[i][j] != null) { saveDprimeWriter.write((i+1) + "\t" + (j+1) + "\t" + dPrimeTable[i][j] + "\n"); } } } } } saveDprimeWriter.close(); }
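The saveDprimeToText change above writes the filtered markers' names instead of their 1-based loop indices in the info-known branch, so rows stay identifiable after markers have been filtered out. A small sketch of the tab-delimited output with hypothetical names and positions (the D' value is a placeholder):

public class DprimeDumpDemo {
    public static void main(String[] args) {
        String[] names = {"rs123", "rs456", "rs789"}; // hypothetical filtered marker names
        long[] positions = {1000L, 3000L, 9000L};     // hypothetical positions
        System.out.println("L1\tL2\tD'\tDist");
        for (int i = 0; i < names.length; i++) {
            for (int j = i + 1; j < names.length; j++) {
                long dist = positions[j] - positions[i];
                double dprime = 1.0; // placeholder for the computed statistic
                System.out.println(names[i] + "\t" + names[j] + "\t" + dprime + "\t" + dist);
            }
        }
    }
}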
fkCol.setPrimaryKeySeq(null);
public void attachRelationship(SQLTable pkTable, SQLTable fkTable, boolean autoGenerateMapping) throws ArchitectException { if(pkTable == null) throw new NullPointerException("Null pkTable not allowed"); if(fkTable == null) throw new NullPointerException("Null fkTable not allowed"); SQLTable oldPkt = this.pkTable; SQLTable oldFkt = this.fkTable; if (this.pkTable != null || this.fkTable != null) { this.detachListeners(); } try { this.pkTable = pkTable; this.fkTable = fkTable; this.fireDbObjectChanged("pkTable",oldPkt,pkTable); this.fireDbObjectChanged("fkTable",oldFkt,fkTable); fkTable.getColumnsFolder().setMagicEnabled(false); fkTable.getImportedKeysFolder().setMagicEnabled(false); pkTable.addExportedKey(this); fkTable.addImportedKey(this); if (autoGenerateMapping) { // iterate over a copy of pktable's column list to avoid comodification // when creating a self-referencing table java.util.List<SQLColumn> pkColListCopy = new ArrayList<SQLColumn>(pkTable.getColumns().size()); pkColListCopy.addAll(pkTable.getColumns()); for (SQLColumn pkCol : pkColListCopy) { if (pkCol.getPrimaryKeySeq() == null) break; SQLColumn fkCol; SQLColumn match = fkTable.getColumnByName(pkCol.getName()); if (match != null) { // does the matching column have a compatible data type? if (match.getType() == pkCol.getType() && match.getPrecision() == pkCol.getPrecision() && match.getScale() == pkCol.getScale()) { // column is an exact match, so we don't have to recreate it fkCol = match; } else { fkCol = new SQLColumn(pkCol); fkCol.setName(generateUniqueColumnName(pkCol,fkTable)); } } else { // no match, so we need to import this column from PK table fkCol = new SQLColumn(pkCol); } this.addMapping(pkCol, fkCol); } } realizeMapping(); this.attachListeners(); } finally { if ( fkTable != null ) { fkTable.getColumnsFolder().setMagicEnabled(true); fkTable.getImportedKeysFolder().setMagicEnabled(true); } } }
for (ColumnMapping m : getMappings()) { SQLColumn fkCol = m.getFkColumn(); try { fkCol.setMagicEnabled(false); if (fkCol.getReferenceCount() == 0) fkCol.addReference(); fkTable.addColumn(fkCol); if (fkCol.getReferenceCount() <= 0) throw new IllegalStateException("Created a column with 0 references!"); if (identifying && fkCol.getPrimaryKeySeq() == null) { fkCol.setPrimaryKeySeq(new Integer(fkTable.getPkSize())); } } finally { fkCol.setMagicEnabled(true); } } }
for (ColumnMapping m : getMappings()) { SQLColumn fkCol = m.getFkColumn(); try { fkCol.setMagicEnabled(false); if (fkCol.getReferenceCount() == 0) fkCol.addReference(); int insertIdx; if (identifying) { if (fkCol.getPrimaryKeySeq() == null) { insertIdx = fkTable.getPkSize(); } else { insertIdx = fkCol.getPrimaryKeySeq(); } } else { if (fkCol.getPrimaryKeySeq() != null) { insertIdx = fkCol.getPrimaryKeySeq(); } else { insertIdx = fkTable.getColumns().size(); } } fkTable.addColumn(insertIdx, fkCol); logger.debug("Added column '" + fkCol.getName() + "' at index " + insertIdx); if (fkCol.getReferenceCount() <= 0) throw new IllegalStateException("Created a column with 0 references!"); if (identifying && fkCol.getPrimaryKeySeq() == null) { fkCol.setPrimaryKeySeq(new Integer(fkTable.getPkSize())); } } finally { fkCol.setMagicEnabled(true); } } }
private void realizeMapping() throws ArchitectException { for (ColumnMapping m : getMappings()) { SQLColumn fkCol = m.getFkColumn(); try { fkCol.setMagicEnabled(false); if (fkCol.getReferenceCount() == 0) fkCol.addReference(); // This might bump up the reference count (which would be correct) fkTable.addColumn(fkCol); if (fkCol.getReferenceCount() <= 0) throw new IllegalStateException("Created a column with 0 references!"); if (identifying && fkCol.getPrimaryKeySeq() == null) { fkCol.setPrimaryKeySeq(new Integer(fkTable.getPkSize())); } } finally { fkCol.setMagicEnabled(true); } } }
context = new Context();
context = new JellyContext();
public void setUp() { context = new Context(); context.setVariable( "foo", "abc" ); context.setVariable( "bar", new Integer( 123 ) ); factory = new BeanShellExpressionFactory(); }
Iterator iter = select.selectNodes( getXPathContext() ).iterator();
List nodes = select.selectNodes( getXPathContext() ); if (xpCmp != null && (xpCmp.getXpath() != null)) { Collections.sort(nodes, xpCmp); } Iterator iter = nodes.iterator();
public void doTag(XMLOutput output) throws Exception { if (select != null) { Iterator iter = select.selectNodes( getXPathContext() ).iterator(); while (iter.hasNext()) { iterationValue = iter.next(); if (var != null) { context.setVariable(var, iterationValue); } invokeBody(output); } } }
metaAssoc.add(viewItems[VIEW_TDT_NUM], tdtPanel);
metaAssoc.add("Single Marker", tdtPanel);
void readGenotypes(String[] inputOptions, int type){ //input is a 2 element array with //inputOptions[0] = ped file //inputOptions[1] = info file (null if none) //type is either 3 or 4 for ped and hapmap files respectively final File inFile = new File(inputOptions[0]); try { if (inFile.length() < 1){ throw new HaploViewException("Genotype file is empty or nonexistent: " + inFile.getName()); } if (type == HAPS){ //these are not available for non ped files viewMenuItems[VIEW_CHECK_NUM].setEnabled(false); viewMenuItems[VIEW_TDT_NUM].setEnabled(false); Options.setAssocTest(ASSOC_NONE); } theData = new HaploData(); if (type == HAPS){ theData.prepareHapsInput(new File(inputOptions[0])); }else{ theData.linkageToChrom(inFile, type); } //deal with marker information theData.infoKnown = false; File markerFile; if (inputOptions[1] == null){ markerFile = null; }else{ markerFile = new File(inputOptions[1]); } checkPanel = null; if (type == HAPS){ readMarkers(markerFile, null); }else{ readMarkers(markerFile, theData.getPedFile().getHMInfo()); checkPanel = new CheckDataPanel(theData, true); checkPanel.setAlignmentX(Component.CENTER_ALIGNMENT); } //let's start the math this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); final SwingWorker worker = new SwingWorker(){ public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; 
//viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add(viewItems[VIEW_TDT_NUM], tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; } }; timer = new javax.swing.Timer(50, new ActionListener(){ public void actionPerformed(ActionEvent evt){ if (theData.finished){ timer.stop(); for (int i = 0; i < blockMenuItems.length; i++){ blockMenuItems[i].setEnabled(true); } clearBlocksItem.setEnabled(true); readMarkerItem.setEnabled(true); blocksItem.setEnabled(true); exportMenuItems[2].setEnabled(true); setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } } }); worker.start(); timer.start(); }catch(IOException ioexec) { JOptionPane.showMessageDialog(this, ioexec.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } catch(PedFileException pfe){ JOptionPane.showMessageDialog(this, pfe.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); }catch (HaploViewException hve){ JOptionPane.showMessageDialog(this, hve.getMessage(), "File Error", JOptionPane.ERROR_MESSAGE); } }
metaAssoc.add(viewItems[VIEW_TDT_NUM], tdtPanel);
metaAssoc.add("Single Marker", tdtPanel);
public Object construct(){ dPrimeDisplay=null; changeKey(1); theData.generateDPrimeTable(); theData.guessBlocks(BLOX_GABRIEL); //theData.guessBlocks(BLOX_NONE); //for debugging, doesn't call blocks at first colorMenuItems[0].setSelected(true); blockMenuItems[0].setSelected(true); zoomMenuItems[0].setSelected(true); theData.blocksChanged = false; Container contents = getContentPane(); contents.removeAll(); int currentTab = VIEW_D_NUM; /*if (!(tabs == null)){ currentTab = tabs.getSelectedIndex(); } */ tabs = new JTabbedPane(); tabs.addChangeListener(new TabChangeListener()); //first, draw the D' picture JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); dPrimeDisplay = new DPrimeDisplay(window); JScrollPane dPrimeScroller = new JScrollPane(dPrimeDisplay); dPrimeScroller.getViewport().setScrollMode(JViewport.BLIT_SCROLL_MODE); dPrimeScroller.getVerticalScrollBar().setUnitIncrement(60); dPrimeScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(dPrimeScroller); tabs.addTab(viewItems[VIEW_D_NUM], panel); viewMenuItems[VIEW_D_NUM].setEnabled(true); //compute and show haps on next tab panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); try { hapDisplay = new HaplotypeDisplay(theData); } catch(HaploViewException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } HaplotypeDisplayController hdc = new HaplotypeDisplayController(hapDisplay); hapScroller = new JScrollPane(hapDisplay); hapScroller.getVerticalScrollBar().setUnitIncrement(60); hapScroller.getHorizontalScrollBar().setUnitIncrement(60); panel.add(hapScroller); panel.add(hdc); tabs.addTab(viewItems[VIEW_HAP_NUM], panel); viewMenuItems[VIEW_HAP_NUM].setEnabled(true); displayMenu.setEnabled(true); analysisMenu.setEnabled(true); //LOD panel /*panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.Y_AXIS)); LODDisplay ld = new LODDisplay(theData); JScrollPane lodScroller = new JScrollPane(ld); panel.add(lodScroller); tabs.addTab(viewItems[VIEW_LOD_NUM], panel); viewMenuItems[VIEW_LOD_NUM].setEnabled(true);*/ //int optionalTabCount = 1; //check data panel if (checkPanel != null){ //optionalTabCount++; //VIEW_CHECK_NUM = optionalTabCount; //viewItems[VIEW_CHECK_NUM] = VIEW_CHECK_PANEL; JPanel metaCheckPanel = new JPanel(); metaCheckPanel.setLayout(new BoxLayout(metaCheckPanel, BoxLayout.Y_AXIS)); JLabel countsLabel = new JLabel("Using " + theData.numSingletons + " singletons and " + theData.numTrios + " trios from " + theData.numPeds + " families."); if (theData.numTrios + theData.numSingletons == 0){ countsLabel.setForeground(Color.red); } countsLabel.setAlignmentX(Component.CENTER_ALIGNMENT); metaCheckPanel.add(countsLabel); metaCheckPanel.add(checkPanel); cdc = new CheckDataController(window); metaCheckPanel.add(cdc); tabs.addTab(viewItems[VIEW_CHECK_NUM], metaCheckPanel); viewMenuItems[VIEW_CHECK_NUM].setEnabled(true); currentTab=VIEW_CHECK_NUM; } //TDT panel if(Options.getAssocTest() != ASSOC_NONE) { //optionalTabCount++; //VIEW_TDT_NUM = optionalTabCount; //viewItems[VIEW_TDT_NUM] = VIEW_TDT; JTabbedPane metaAssoc = new JTabbedPane(); try{ tdtPanel = new TDTPanel(theData.getPedFile()); } catch(PedFileException e) { JOptionPane.showMessageDialog(window, e.getMessage(), "Error", JOptionPane.ERROR_MESSAGE); } metaAssoc.add(viewItems[VIEW_TDT_NUM], tdtPanel); HaploAssocPanel htp = new HaploAssocPanel(theData.getHaplotypes()); metaAssoc.add("Haplotypes", htp); tabs.addTab("Association Results", metaAssoc); 
viewMenuItems[VIEW_TDT_NUM].setEnabled(true); } tabs.setSelectedIndex(currentTab); contents.add(tabs); repaint(); setVisible(true); theData.finished = true; setTitle(TITLE_STRING + " -- " + inFile.getName()); return null; }
Class type = (Class) tags.get(name); if ( type != null ) { return TagScript.newInstance(type);
Object value = tags.get(name); if (value instanceof Class) { Class type = (Class) value; if ( type != null ) { return TagScript.newInstance(type); } } else if (value instanceof TagFactory) { return new TagScript( (TagFactory) value );
public TagScript createTagScript(String name, Attributes attributes) throws Exception { Class type = (Class) tags.get(name); if ( type != null ) { return TagScript.newInstance(type); } return null; }
tc.dumpTags(validateOutputFile(fileName + ".TAGSNPS"));
private void processFile(String fileName, int fileType, String infoFileName){ try { HaploData textData; File outputFile; File inputFile; AssociationTestSet customAssocSet; if(!quietMode && fileName != null){ System.out.println("Using data file: " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); //Vector result = null; if(fileType == HAPS_FILE){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED_FILE) { //read in ped file textData.linkageToChrom(inputFile, PED_FILE); if(textData.getPedFile().isBogusParents()) { System.out.println("Error: One or more individuals in the file reference non-existent parents.\nThese references have been ignored."); } }else{ //read in hapmapfile textData.linkageToChrom(inputFile,HMP_FILE); } File infoFile = null; if (infoFileName != null){ infoFile = new File(infoFileName); } textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); HashSet whiteListedCustomMarkers = new HashSet(); if (customAssocTestsFileName != null){ customAssocSet = new AssociationTestSet(customAssocTestsFileName); whiteListedCustomMarkers = customAssocSet.getWhitelist(); }else{ customAssocSet = null; } Hashtable snpsByName = new Hashtable(); for(int i=0;i<Chromosome.getUnfilteredSize();i++) { SNP snp = Chromosome.getUnfilteredMarker(i); snpsByName.put(snp.getName(), snp); } if(forceIncludeTags != null) { for(int i=0;i<forceIncludeTags.size();i++) { if(snpsByName.containsKey(forceIncludeTags.get(i))) { whiteListedCustomMarkers.add(snpsByName.get(forceIncludeTags.get(i))); } } } textData.setWhiteList(whiteListedCustomMarkers); boolean[] markerResults = new boolean[Chromosome.getUnfilteredSize()]; Vector result = null; result = textData.getPedFile().getResults(); //once check has been run we can filter the markers for (int i = 0; i < result.size(); i++){ if (((((MarkerResult)result.get(i)).getRating() > 0 || skipCheck) && Chromosome.getUnfilteredMarker(i).getDupStatus() != 2)){ markerResults[i] = true; }else{ markerResults[i] = false; } } for (int i = 0; i < excludedMarkers.size(); i++){ int cur = ((Integer)excludedMarkers.elementAt(i)).intValue(); if (cur < 1 || cur > markerResults.length){ System.out.println("Excluded marker out of bounds: " + cur + "\nMarkers must be between 1 and N, where N is the total number of markers."); System.exit(1); }else{ markerResults[cur-1] = false; } } for(int i=0;i<Chromosome.getUnfilteredSize();i++) { if(textData.isWhiteListed(Chromosome.getUnfilteredMarker(i))) { markerResults[i] = true; } } Chromosome.doFilter(markerResults); if(!quietMode && infoFile != null){ System.out.println("Using marker information file: " + infoFile.getName()); } if(outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData); cp.printTable(validateOutputFile(fileName + ".CHECK")); } if(individualCheck && result != null){ IndividualDialog id = new IndividualDialog(textData); id.printTable(validateOutputFile(fileName +".INDCHECK")); } Vector cust = new Vector(); AssociationTestSet blockTestSet = null; if(blockOutputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; Haplotype[][] filtHaplos; switch(blockOutputType){ case BLOX_GABRIEL: outputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: outputFile = validateOutputFile(fileName + ".4GAMblocks"); break; case BLOX_SPINE: outputFile = validateOutputFile(fileName + ".SPINEblocks"); break; 
case BLOX_CUSTOM: outputFile = validateOutputFile(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(blockFileName); if(!quietMode) { System.out.println("Using custom blocks file " + blockFileName); } cust = textData.readBlocks(blocksFile); break; case BLOX_ALL: //handled below, so we don't do anything here outputFile = null; break; default: outputFile = validateOutputFile(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(blockOutputType == BLOX_ALL) { outputFile = validateOutputFile(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid Gabriel blocks."); } outputFile = validateOutputFile(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid 4 Gamete blocks."); } outputFile = validateOutputFile(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid LD Spine blocks."); } }else{ //guesses blocks based on output type determined above. 
textData.guessBlocks(blockOutputType, cust); haplos = textData.generateBlockHaplotypes(textData.blocks); if (haplos != null){ filtHaplos = filterHaplos(haplos); textData.pickTags(filtHaplos); textData.saveHapsToText(haplos, textData.computeMultiDprime(filtHaplos), outputFile); }else if (!quietMode){ System.out.println("Skipping block output: no valid blocks."); } } if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC) { if (blockOutputType == BLOX_ALL){ System.out.println("Haplotype association results cannot be used with block output \"ALL\""); }else{ if (haplos != null){ blockTestSet = new AssociationTestSet(haplos,null); blockTestSet.saveHapsToText(validateOutputFile(fileName + ".HAPASSOC")); }else if (!quietMode){ System.out.println("Skipping block association output: no valid blocks."); } } } } if(outputDprime) { outputFile = validateOutputFile(fileName + ".LD"); if (textData.dpTable != null){ textData.saveDprimeToText(outputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(outputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (outputPNG || outputCompressedPNG){ outputFile = validateOutputFile(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (trackFileName != null){ textData.readAnalysisTrack(new File(trackFileName)); if(!quietMode) { System.out.println("Using analysis track file " + trackFileName); } } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getUnfilteredSize(),outputCompressedPNG); try{ Jimi.putImage("image/png", i, outputFile.getAbsolutePath()); }catch(JimiException je){ System.out.println(je.getMessage()); } } AssociationTestSet markerTestSet =null; if(Options.getAssocTest() == ASSOC_TRIO || Options.getAssocTest() == ASSOC_CC){ if (randomizeAffection){ Vector aff = new Vector(); int j=0, k=0; for (int i = 0; i < textData.getPedFile().getNumIndividuals(); i++){ if (i%2 == 0){ aff.add(new Integer(1)); j++; }else{ aff.add(new Integer(2)); k++; } } Collections.shuffle(aff); markerTestSet = new AssociationTestSet(textData.getPedFile(),aff,Chromosome.getAllMarkers()); }else{ markerTestSet = new AssociationTestSet(textData.getPedFile(),null,Chromosome.getAllMarkers()); } markerTestSet.saveSNPsToText(validateOutputFile(fileName + ".ASSOC")); } if(customAssocSet != null) { if(!quietMode) { System.out.println("Using custom association test file " + customAssocTestsFileName); } try { customAssocSet.setPermTests(doPermutationTest); customAssocSet.runFileTests(textData,markerTestSet.getMarkerAssociationResults()); customAssocSet.saveResultsToText(validateOutputFile(fileName + ".CUSTASSOC")); }catch(IOException ioe) { System.out.println("An error occured writing the custom association results file."); customAssocSet = null; } } if(doPermutationTest) { AssociationTestSet permTests = new AssociationTestSet(); permTests.cat(markerTestSet); if(blockTestSet != null) { permTests.cat(blockTestSet); } final PermutationTestSet pts = new PermutationTestSet(permutationCount,textData.getPedFile(),customAssocSet,permTests); Thread permThread = new Thread(new Runnable() { public void run() { if (pts.isCustom()){ pts.doPermutations(PermutationTestSet.CUSTOM); }else{ pts.doPermutations(PermutationTestSet.SINGLE_PLUS_BLOCKS); } } }); permThread.start(); if(!quietMode) { System.out.println("Starting " + permutationCount + " permutation tests (each . 
printed represents 1% of tests completed)"); } int dotsPrinted =0; while(pts.getPermutationCount() - pts.getPermutationsPerformed() > 0) { while(( (double)pts.getPermutationsPerformed() / pts.getPermutationCount())*100 > dotsPrinted) { System.out.print("."); dotsPrinted++; } try{ Thread.sleep(100); }catch(InterruptedException ie) {} } System.out.println(); try { pts.writeResultsToFile(validateOutputFile(fileName + ".PERMUT")); } catch(IOException ioe) { System.out.println("An error occured while writing the permutation test results to file."); } } if(tagging != Tagger.NONE) { if(textData.dpTable == null) { textData.generateDPrimeTable(); } Vector snps = Chromosome.getAllMarkers(); HashSet names = new HashSet(); for (int i = 0; i < snps.size(); i++) { SNP snp = (SNP) snps.elementAt(i); names.add(snp.getName()); } HashSet filteredNames = new HashSet(); for(int i=0;i<Chromosome.getSize();i++) { filteredNames.add(Chromosome.getMarker(i).getName()); } Vector sitesToCapture = new Vector(); for(int i=0;i<Chromosome.getSize();i++) { sitesToCapture.add(Chromosome.getMarker(i)); } for (int i = 0; i < forceIncludeTags.size(); i++) { String s = (String) forceIncludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced included tags since I don't know about it."); } } for (int i = 0; i < forceExcludeTags.size(); i++) { String s = (String) forceExcludeTags.elementAt(i); if(!names.contains(s) && !quietMode) { System.out.println("Warning: skipping marker " + s + " in the list of forced excluded tags since I don't know about it."); } } //chuck out filtered jazz from excludes, and nonexistent markers from both forceExcludeTags.retainAll(filteredNames); forceIncludeTags.retainAll(names); if(!quietMode) { System.out.println("Starting tagging."); } TaggerController tc = new TaggerController(textData,forceIncludeTags,forceExcludeTags,sitesToCapture, tagging,maxNumTags,findTags); tc.runTagger(); while(!tc.isTaggingCompleted()) { try { Thread.sleep(100); }catch(InterruptedException ie) {} } tc.saveResultsToFile(validateOutputFile(fileName + ".TAGS")); tc.dumpTests(validateOutputFile(fileName + ".TESTS")); } } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
for (int colNo = 0; colNo < ncols; colNo++) { String contents = headings[colNo]; StringTokenizer st = new StringTokenizer(contents); while (st.hasMoreTokens()) { widths[colNo] = Math.max(widths[colNo], bf.getWidthPoint(st.nextToken(), colHeadingFSize));
for (int colNo = 0; colNo < ncols; colNo++) { String contents = headings[colNo]; StringTokenizer st = new StringTokenizer(contents); while (st.hasMoreTokens()) { widths[colNo] = Math.max(widths[colNo], bf.getWidthPoint(st.nextToken(), colHeadingFSize)); } Phrase colTitle = new Phrase(contents, colHeadingFont); PdfPCell cell = new PdfPCell(colTitle); cell.setBorder(Rectangle.BOTTOM | Rectangle.TOP); cell.setBorderWidth(2); cell.setBackgroundColor(new Color(200, 200, 200)); cell.setHorizontalAlignment(Element.ALIGN_CENTER); table.addCell(cell);
private void addHeaderRow(ProfileManager pm, SQLTable sqlTable, PdfPTable table, BaseFont bf, float titleFSize, float colHeadingFSize, float[] widths) throws DocumentException, IOException, ArchitectException { int ncols = headings.length; Font titleFont = new Font(bf, titleFSize, Font.BOLD); Font colHeadingFont = new Font(bf, colHeadingFSize); TableProfileResult tProfile = (TableProfileResult) pm.getResult(sqlTable); PdfPTable infoTable = new PdfPTable(2); StringBuffer heading = new StringBuffer(); if ( tProfile.isError() ) { heading.append("Table: ").append(sqlTable.getName()); heading.append(" Profiling Error"); if ( tProfile.getEx() != null ) heading.append(":\n").append(tProfile.getEx()); } else { PdfPCell infoCell; heading.append("Table: ").append(sqlTable.getName()); infoCell = new PdfPCell(new Phrase("Row Count:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(String.valueOf(tProfile.getRowCount()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Create Date:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); infoCell = new PdfPCell(new Phrase(df.format(tProfile.getCreateDate()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Elapsed:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(tProfile.getTimeToCreate()+"ms", colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); } PdfPCell hcell = new PdfPCell(new Phrase(heading.toString(), titleFont)); hcell.setColspan(ncols - 2); hcell.setBorder(Rectangle.NO_BORDER); hcell.setVerticalAlignment(Element.ALIGN_BOTTOM); table.addCell(hcell); hcell = new PdfPCell(infoTable); hcell.setColspan(2); hcell.setBorder(Rectangle.NO_BORDER); table.addCell(hcell); for (int colNo = 0; colNo < ncols; colNo++) { String contents = headings[colNo]; // ensure column width is at least enough for widest word in heading StringTokenizer st = new StringTokenizer(contents); while (st.hasMoreTokens()) { widths[colNo] = Math.max(widths[colNo], bf.getWidthPoint(st.nextToken(), colHeadingFSize)); } Phrase colTitle = new Phrase(contents, colHeadingFont); PdfPCell cell = new PdfPCell(colTitle); cell.setBorder(Rectangle.BOTTOM | Rectangle.TOP); cell.setBorderWidth(2); cell.setBackgroundColor(new Color(200, 200, 200)); cell.setHorizontalAlignment(Element.ALIGN_CENTER); table.addCell(cell); } table.setHeaderRows(2); }
Phrase colTitle = new Phrase(contents, colHeadingFont); PdfPCell cell = new PdfPCell(colTitle); cell.setBorder(Rectangle.BOTTOM | Rectangle.TOP); cell.setBorderWidth(2); cell.setBackgroundColor(new Color(200, 200, 200)); cell.setHorizontalAlignment(Element.ALIGN_CENTER); table.addCell(cell);
private void addHeaderRow(ProfileManager pm, SQLTable sqlTable, PdfPTable table, BaseFont bf, float titleFSize, float colHeadingFSize, float[] widths) throws DocumentException, IOException, ArchitectException { int ncols = headings.length; Font titleFont = new Font(bf, titleFSize, Font.BOLD); Font colHeadingFont = new Font(bf, colHeadingFSize); TableProfileResult tProfile = (TableProfileResult) pm.getResult(sqlTable); PdfPTable infoTable = new PdfPTable(2); StringBuffer heading = new StringBuffer(); if ( tProfile.isError() ) { heading.append("Table: ").append(sqlTable.getName()); heading.append(" Profiling Error"); if ( tProfile.getEx() != null ) heading.append(":\n").append(tProfile.getEx()); } else { PdfPCell infoCell; heading.append("Table: ").append(sqlTable.getName()); infoCell = new PdfPCell(new Phrase("Row Count:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(String.valueOf(tProfile.getRowCount()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Create Date:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); infoCell = new PdfPCell(new Phrase(df.format(tProfile.getCreateDate()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Elapsed:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(tProfile.getTimeToCreate()+"ms", colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); } PdfPCell hcell = new PdfPCell(new Phrase(heading.toString(), titleFont)); hcell.setColspan(ncols - 2); hcell.setBorder(Rectangle.NO_BORDER); hcell.setVerticalAlignment(Element.ALIGN_BOTTOM); table.addCell(hcell); hcell = new PdfPCell(infoTable); hcell.setColspan(2); hcell.setBorder(Rectangle.NO_BORDER); table.addCell(hcell); for (int colNo = 0; colNo < ncols; colNo++) { String contents = headings[colNo]; // ensure column width is at least enough for widest word in heading StringTokenizer st = new StringTokenizer(contents); while (st.hasMoreTokens()) { widths[colNo] = Math.max(widths[colNo], bf.getWidthPoint(st.nextToken(), colHeadingFSize)); } Phrase colTitle = new Phrase(contents, colHeadingFont); PdfPCell cell = new PdfPCell(colTitle); cell.setBorder(Rectangle.BOTTOM | Rectangle.TOP); cell.setBorderWidth(2); cell.setBackgroundColor(new Color(200, 200, 200)); cell.setHorizontalAlignment(Element.ALIGN_CENTER); table.addCell(cell); } table.setHeaderRows(2); }
else { hcell = new PdfPCell(new Phrase("No columns found in the table", titleFont)); hcell.setColspan(ncols); hcell.setBorder(Rectangle.BOTTOM); hcell.setVerticalAlignment(Element.ALIGN_LEFT); table.addCell(hcell); }
private void addHeaderRow(ProfileManager pm, SQLTable sqlTable, PdfPTable table, BaseFont bf, float titleFSize, float colHeadingFSize, float[] widths) throws DocumentException, IOException, ArchitectException { int ncols = headings.length; Font titleFont = new Font(bf, titleFSize, Font.BOLD); Font colHeadingFont = new Font(bf, colHeadingFSize); TableProfileResult tProfile = (TableProfileResult) pm.getResult(sqlTable); PdfPTable infoTable = new PdfPTable(2); StringBuffer heading = new StringBuffer(); if ( tProfile.isError() ) { heading.append("Table: ").append(sqlTable.getName()); heading.append(" Profiling Error"); if ( tProfile.getEx() != null ) heading.append(":\n").append(tProfile.getEx()); } else { PdfPCell infoCell; heading.append("Table: ").append(sqlTable.getName()); infoCell = new PdfPCell(new Phrase("Row Count:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(String.valueOf(tProfile.getRowCount()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Create Date:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); DateFormat df = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss"); infoCell = new PdfPCell(new Phrase(df.format(tProfile.getCreateDate()), colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase("Elapsed:",colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoCell.setHorizontalAlignment(Element.ALIGN_RIGHT); infoTable.addCell(infoCell); infoCell = new PdfPCell(new Phrase(tProfile.getTimeToCreate()+"ms", colHeadingFont)); infoCell.setBorder(Rectangle.NO_BORDER); infoTable.addCell(infoCell); } PdfPCell hcell = new PdfPCell(new Phrase(heading.toString(), titleFont)); hcell.setColspan(ncols - 2); hcell.setBorder(Rectangle.NO_BORDER); hcell.setVerticalAlignment(Element.ALIGN_BOTTOM); table.addCell(hcell); hcell = new PdfPCell(infoTable); hcell.setColspan(2); hcell.setBorder(Rectangle.NO_BORDER); table.addCell(hcell); for (int colNo = 0; colNo < ncols; colNo++) { String contents = headings[colNo]; // ensure column width is at least enough for widest word in heading StringTokenizer st = new StringTokenizer(contents); while (st.hasMoreTokens()) { widths[colNo] = Math.max(widths[colNo], bf.getWidthPoint(st.nextToken(), colHeadingFSize)); } Phrase colTitle = new Phrase(contents, colHeadingFont); PdfPCell cell = new PdfPCell(colTitle); cell.setBorder(Rectangle.BOTTOM | Rectangle.TOP); cell.setBorderWidth(2); cell.setBackgroundColor(new Color(200, 200, 200)); cell.setHorizontalAlignment(Element.ALIGN_CENTER); table.addCell(cell); } table.setHeaderRows(2); }
/* if (endrow < table.size() && endrow + minRowsTogether >= table.size()) {
if (endrow < table.size() && endrow + minRowsTogether >= table.size()) {
public void createPdf(OutputStream out, java.util.List<SQLTable> tables, ProfileManager pm) throws DocumentException, IOException, SQLException, ArchitectException, InstantiationException, IllegalAccessException { final int minRowsTogether = 1; // counts smaller than this are considered orphan/widow final int mtop = 50; // margin at top of page (in points) final int mbot = 50; // margin at bottom of page (page numbers are below this) final int pbot = 20; // padding between bottom margin and bottom of body text final int mlft = 50; // margin at left side of page final int mrgt = 50; // margin at right side of page final Rectangle pagesize = PageSize.LETTER.rotate(); final Document document = new Document(pagesize, mlft, mrgt, mtop, mbot); final PdfWriter writer = PdfWriter.getInstance(document, out); final float fsize = 6f; // the font size to use in the table body final BaseFont bf = BaseFont.createFont(BaseFont.HELVETICA, BaseFont.WINANSI, BaseFont.NOT_EMBEDDED); document.addTitle("Table Profiling Report"); document.addSubject("Tables: " + tables); document.addAuthor(System.getProperty("user.name")); document.addCreator("Power*Architect version "+ArchitectUtils.APP_VERSION); document.open(); // vertical position where next element should start // (bottom is 0; top is pagesize.height()) float pos = pagesize.height() - mtop; final PdfContentByte cb = writer.getDirectContent(); final PdfTemplate nptemplate = cb.createTemplate(50, 50); writer.setPageEvent(new PdfPageEventHelper() { // prints the "page N of <template>" footer public void onEndPage(PdfWriter writer, Document document) { int pageN = writer.getPageNumber(); String text = "Page " + pageN + " of "; float len = bf.getWidthPoint(text, fsize-2); cb.beginText(); cb.setFontAndSize(bf, fsize-2); cb.setTextMatrix(pagesize.width()/2 - len/2, mbot/2); cb.showText(text); cb.endText(); cb.addTemplate(nptemplate, pagesize.width()/2 - len/2 + len, mbot/2); } public void onCloseDocument(PdfWriter writer, Document document) { nptemplate.beginText(); nptemplate.setFontAndSize(bf, fsize-2); nptemplate.showText(String.valueOf(writer.getPageNumber() - 1)); nptemplate.endText(); } }); document.add(new Paragraph("Power*Architect Profiling Report")); document.add(new Paragraph("Generated "+new java.util.Date() +" by "+System.getProperty("user.name"))); float[] widths = new float[totalColumn]; // widths of widest cells per row in pdf table LinkedList<PdfPTable> profiles = new LinkedList<PdfPTable>(); // 1 table per profile result for (SQLTable t : tables) { PdfPTable table = makeNextTable(pm,t, bf, fsize, widths); profiles.add(table); } // add the PdfPTables to the document; try to avoid orphan and widow rows pos = writer.getVerticalPosition(true) - fsize; logger.debug("Starting at pos="+pos); for (PdfPTable table : profiles) { table.setTotalWidth(pagesize.width() - mrgt - mlft); table.setWidths(widths); int startrow = table.getHeaderRows(); int endrow = startrow; // current page will contain header+startrow..endrow while (endrow < table.size()) { // figure out how many body rows fit nicely on the page float endpos = pos - calcHeaderHeight(table); while (endpos > (mbot + pbot) && endrow < table.size() ) { endpos -= table.getRowHeight(endrow); endrow++; } // adjust for orphan rows. Might create widows or make // endrow < startrow, which is handled later by deferring the table/* if (endrow < table.size() && endrow + minRowsTogether >= table.size()) { if (endrow + 1 == table.size()) { // short by 1 row.. 
just squeeze it in endrow = table.size(); } else { // more than 1 row remains: shorten this page so orphans aren't lonely endrow = table.size() - minRowsTogether; } }*/ if (endrow == table.size() || endrow - startrow > minRowsTogether) { // this is the end of the table, or we have enough rows to bother printing pos = table.writeSelectedRows(0, table.getHeaderRows(), mlft, pos, cb); pos = table.writeSelectedRows(startrow, endrow, mlft, pos, cb); startrow = endrow; } else { // not the end of the table and not enough rows to print out endrow = startrow; } // new page if necessary (that is, when we aren't finished the table yet) if (endrow != table.size()) { document.newPage(); pos = pagesize.height() - mtop; } } } document.close(); }
}*/
}
public void createPdf(OutputStream out, java.util.List<SQLTable> tables, ProfileManager pm) throws DocumentException, IOException, SQLException, ArchitectException, InstantiationException, IllegalAccessException { final int minRowsTogether = 1; // counts smaller than this are considered orphan/widow final int mtop = 50; // margin at top of page (in points) final int mbot = 50; // margin at bottom of page (page numbers are below this) final int pbot = 20; // padding between bottom margin and bottom of body text final int mlft = 50; // margin at left side of page final int mrgt = 50; // margin at right side of page final Rectangle pagesize = PageSize.LETTER.rotate(); final Document document = new Document(pagesize, mlft, mrgt, mtop, mbot); final PdfWriter writer = PdfWriter.getInstance(document, out); final float fsize = 6f; // the font size to use in the table body final BaseFont bf = BaseFont.createFont(BaseFont.HELVETICA, BaseFont.WINANSI, BaseFont.NOT_EMBEDDED); document.addTitle("Table Profiling Report"); document.addSubject("Tables: " + tables); document.addAuthor(System.getProperty("user.name")); document.addCreator("Power*Architect version "+ArchitectUtils.APP_VERSION); document.open(); // vertical position where next element should start // (bottom is 0; top is pagesize.height()) float pos = pagesize.height() - mtop; final PdfContentByte cb = writer.getDirectContent(); final PdfTemplate nptemplate = cb.createTemplate(50, 50); writer.setPageEvent(new PdfPageEventHelper() { // prints the "page N of <template>" footer public void onEndPage(PdfWriter writer, Document document) { int pageN = writer.getPageNumber(); String text = "Page " + pageN + " of "; float len = bf.getWidthPoint(text, fsize-2); cb.beginText(); cb.setFontAndSize(bf, fsize-2); cb.setTextMatrix(pagesize.width()/2 - len/2, mbot/2); cb.showText(text); cb.endText(); cb.addTemplate(nptemplate, pagesize.width()/2 - len/2 + len, mbot/2); } public void onCloseDocument(PdfWriter writer, Document document) { nptemplate.beginText(); nptemplate.setFontAndSize(bf, fsize-2); nptemplate.showText(String.valueOf(writer.getPageNumber() - 1)); nptemplate.endText(); } }); document.add(new Paragraph("Power*Architect Profiling Report")); document.add(new Paragraph("Generated "+new java.util.Date() +" by "+System.getProperty("user.name"))); float[] widths = new float[totalColumn]; // widths of widest cells per row in pdf table LinkedList<PdfPTable> profiles = new LinkedList<PdfPTable>(); // 1 table per profile result for (SQLTable t : tables) { PdfPTable table = makeNextTable(pm,t, bf, fsize, widths); profiles.add(table); } // add the PdfPTables to the document; try to avoid orphan and widow rows pos = writer.getVerticalPosition(true) - fsize; logger.debug("Starting at pos="+pos); for (PdfPTable table : profiles) { table.setTotalWidth(pagesize.width() - mrgt - mlft); table.setWidths(widths); int startrow = table.getHeaderRows(); int endrow = startrow; // current page will contain header+startrow..endrow while (endrow < table.size()) { // figure out how many body rows fit nicely on the page float endpos = pos - calcHeaderHeight(table); while (endpos > (mbot + pbot) && endrow < table.size() ) { endpos -= table.getRowHeight(endrow); endrow++; } // adjust for orphan rows. Might create widows or make // endrow < startrow, which is handled later by deferring the table/* if (endrow < table.size() && endrow + minRowsTogether >= table.size()) { if (endrow + 1 == table.size()) { // short by 1 row.. 
just squeeze it in endrow = table.size(); } else { // more than 1 row remains: shorten this page so orphans aren't lonely endrow = table.size() - minRowsTogether; } }*/ if (endrow == table.size() || endrow - startrow > minRowsTogether) { // this is the end of the table, or we have enough rows to bother printing pos = table.writeSelectedRows(0, table.getHeaderRows(), mlft, pos, cb); pos = table.writeSelectedRows(startrow, endrow, mlft, pos, cb); startrow = endrow; } else { // not the end of the table and not enough rows to print out endrow = startrow; } // new page if necessary (that is, when we aren't finished the table yet) if (endrow != table.size()) { document.newPage(); pos = pagesize.height() - mtop; } } } document.close(); }
if (endrow == table.size() || endrow - startrow > minRowsTogether) {
if (endrow == table.size() || endrow - startrow >= minRowsTogether) {
public void createPdf(OutputStream out, java.util.List<SQLTable> tables, ProfileManager pm) throws DocumentException, IOException, SQLException, ArchitectException, InstantiationException, IllegalAccessException { final int minRowsTogether = 1; // counts smaller than this are considered orphan/widow final int mtop = 50; // margin at top of page (in points) final int mbot = 50; // margin at bottom of page (page numbers are below this) final int pbot = 20; // padding between bottom margin and bottom of body text final int mlft = 50; // margin at left side of page final int mrgt = 50; // margin at right side of page final Rectangle pagesize = PageSize.LETTER.rotate(); final Document document = new Document(pagesize, mlft, mrgt, mtop, mbot); final PdfWriter writer = PdfWriter.getInstance(document, out); final float fsize = 6f; // the font size to use in the table body final BaseFont bf = BaseFont.createFont(BaseFont.HELVETICA, BaseFont.WINANSI, BaseFont.NOT_EMBEDDED); document.addTitle("Table Profiling Report"); document.addSubject("Tables: " + tables); document.addAuthor(System.getProperty("user.name")); document.addCreator("Power*Architect version "+ArchitectUtils.APP_VERSION); document.open(); // vertical position where next element should start // (bottom is 0; top is pagesize.height()) float pos = pagesize.height() - mtop; final PdfContentByte cb = writer.getDirectContent(); final PdfTemplate nptemplate = cb.createTemplate(50, 50); writer.setPageEvent(new PdfPageEventHelper() { // prints the "page N of <template>" footer public void onEndPage(PdfWriter writer, Document document) { int pageN = writer.getPageNumber(); String text = "Page " + pageN + " of "; float len = bf.getWidthPoint(text, fsize-2); cb.beginText(); cb.setFontAndSize(bf, fsize-2); cb.setTextMatrix(pagesize.width()/2 - len/2, mbot/2); cb.showText(text); cb.endText(); cb.addTemplate(nptemplate, pagesize.width()/2 - len/2 + len, mbot/2); } public void onCloseDocument(PdfWriter writer, Document document) { nptemplate.beginText(); nptemplate.setFontAndSize(bf, fsize-2); nptemplate.showText(String.valueOf(writer.getPageNumber() - 1)); nptemplate.endText(); } }); document.add(new Paragraph("Power*Architect Profiling Report")); document.add(new Paragraph("Generated "+new java.util.Date() +" by "+System.getProperty("user.name"))); float[] widths = new float[totalColumn]; // widths of widest cells per row in pdf table LinkedList<PdfPTable> profiles = new LinkedList<PdfPTable>(); // 1 table per profile result for (SQLTable t : tables) { PdfPTable table = makeNextTable(pm,t, bf, fsize, widths); profiles.add(table); } // add the PdfPTables to the document; try to avoid orphan and widow rows pos = writer.getVerticalPosition(true) - fsize; logger.debug("Starting at pos="+pos); for (PdfPTable table : profiles) { table.setTotalWidth(pagesize.width() - mrgt - mlft); table.setWidths(widths); int startrow = table.getHeaderRows(); int endrow = startrow; // current page will contain header+startrow..endrow while (endrow < table.size()) { // figure out how many body rows fit nicely on the page float endpos = pos - calcHeaderHeight(table); while (endpos > (mbot + pbot) && endrow < table.size() ) { endpos -= table.getRowHeight(endrow); endrow++; } // adjust for orphan rows. Might create widows or make // endrow < startrow, which is handled later by deferring the table/* if (endrow < table.size() && endrow + minRowsTogether >= table.size()) { if (endrow + 1 == table.size()) { // short by 1 row.. 
just squeeze it in endrow = table.size(); } else { // more than 1 row remains: shorten this page so orphans aren't lonely endrow = table.size() - minRowsTogether; } }*/ if (endrow == table.size() || endrow - startrow > minRowsTogether) { // this is the end of the table, or we have enough rows to bother printing pos = table.writeSelectedRows(0, table.getHeaderRows(), mlft, pos, cb); pos = table.writeSelectedRows(startrow, endrow, mlft, pos, cb); startrow = endrow; } else { // not the end of the table and not enough rows to print out endrow = startrow; } // new page if necessary (that is, when we aren't finished the table yet) if (endrow != table.size()) { document.newPage(); pos = pagesize.height() - mtop; } } } document.close(); }
DBUtils.init();
public static void main(String[] args) throws Exception{ Runtime.getRuntime().addShutdownHook(new Thread(){ public void run(){ logger.info("jManage shutting down..."); DBUtils.shutdownDB(); } }); if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } UserManager userManager = UserManager.getInstance(); User user = null; char[] password = null; int invalidAttempts = 0; if(args.length == 1){ password = args[0].toCharArray(); user = userManager.verifyUsernamePassword( AuthConstants.USER_ADMIN, password); /* invalid password was tried */ if(user == null){ invalidAttempts ++; } } while(user == null){ if(invalidAttempts > 0){ System.out.println("Invalid Admin Password."); } /* get the password */ password = PasswordField.getPassword("Enter password:"); /* the password should match for the admin user */ user = userManager.verifyUsernamePassword( AuthConstants.USER_ADMIN, password); invalidAttempts ++; if(invalidAttempts >= 3){ break; } } /* exit if the admin password is still invalid */ if(user == null){ System.out.println("Number of invalid attempts exceeded. Exiting !"); return; } /* set admin password as the stop key */ final JettyStopKey stopKey = new JettyStopKey(new String(password)); System.setProperty("STOP.KEY", stopKey.toString()); /* set stop.port */ System.setProperty("STOP.PORT", JManageProperties.getStopPort()); /* initialize ServiceFactory */ ServiceFactory.init(ServiceFactory.MODE_LOCAL); /* initialize crypto */ Crypto.init(password); /* clear the password */ Arrays.fill(password, ' '); /* load ACLs */ ACLStore.getInstance(); /* start the AlertEngine */ AlertEngine.getInstance().start(); /* start the application downtime service */ ApplicationDowntimeService.getInstance().start(); /* load connectors */ try{ ConnectorConfigRegistry.init(); }catch(Exception e){ logger.log(Level.SEVERE, "Error initializing connector config registry.", e); } try{ ConnectorRegistry.load(); }catch(Exception e){ logger.log(Level.SEVERE, "Error initializing connector registry.", e); } /* start the application */ start(); }
haplos = textData.generateHaplotypes(textData.blocks, 1);
haplos = textData.generateHaplotypes(textData.blocks, 1, false);
private void processFile(String fileName, int fileType, String infoFileName){ try { //int outputType; HaploData textData; File OutputFile; File inputFile; if(!HaploText.quietMode && fileName != null){ System.out.println("Using data file " + fileName); } inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } textData = new HaploData(); Vector result = null; if(fileType == HAPS){ //read in haps file textData.prepareHapsInput(inputFile); } else if (fileType == PED) { //read in ped file /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ result = textData.linkageToChrom(inputFile, 3, this.skipCheck); }else{ //read in hapmapfile result = textData.linkageToChrom(inputFile,4,this.skipCheck); } File infoFile = null; if(infoFileName != null){ infoFile = new File(infoFileName); } if (result != null){ textData.prepareMarkerInput(infoFile,textData.getPedFile().getHMInfo()); }else{ textData.prepareMarkerInput(infoFile,null); } if(!HaploText.quietMode && infoFile != null){ System.out.println("Using marker file " + infoFile.getName()); } if(this.showCheck && result != null) { CheckDataPanel cp = new CheckDataPanel(textData, false); cp.printTable(null); } if(this.outputCheck && result != null){ CheckDataPanel cp = new CheckDataPanel(textData, false); cp.printTable(new File (fileName + ".CHECK")); } Vector cust = new Vector(); if(outputType != -1){ textData.generateDPrimeTable(); Haplotype[][] haplos; switch(outputType){ case BLOX_GABRIEL: OutputFile = new File(fileName + ".GABRIELblocks"); break; case BLOX_4GAM: OutputFile = new File(fileName + ".4GAMblocks"); break; case BLOX_SPINE: OutputFile = new File(fileName + ".SPINEblocks"); break; case BLOX_CUSTOM: OutputFile = new File(fileName + ".CUSTblocks"); //read in the blocks file File blocksFile = new File(this.blockFileName); cust = textData.readBlocks(blocksFile); break; default: OutputFile = new File(fileName + ".GABRIELblocks"); break; } //this handles output type ALL if(outputType == BLOX_ALL) { OutputFile = new File(fileName + ".GABRIELblocks"); textData.guessBlocks(BLOX_GABRIEL); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(BLOX_4GAM); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".SPINEblocks"); textData.guessBlocks(BLOX_SPINE); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }else{ textData.guessBlocks(outputType, cust); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } } if(this.outputDprime) { OutputFile = new File(fileName + ".DPRIME"); if (textData.dpTable != null){ textData.saveDprimeToText(OutputFile, TABLE_TYPE, 0, Chromosome.getSize()); }else{ textData.saveDprimeToText(OutputFile, LIVE_TYPE, 0, Chromosome.getSize()); } } if (this.outputPNG || this.outputSmallPNG){ OutputFile 
= new File(fileName + ".LD.PNG"); if (textData.dpTable == null){ textData.generateDPrimeTable(); textData.guessBlocks(BLOX_CUSTOM, new Vector()); } if (this.trackFileName != null){ textData.readAnalysisTrack(new File(this.trackFileName)); } DPrimeDisplay dpd = new DPrimeDisplay(textData); BufferedImage i = dpd.export(0,Chromosome.getSize(),this.outputSmallPNG); try{ Jimi.putImage("image/png", i, OutputFile.getName()); }catch(JimiException je){ System.out.println(je.getMessage()); } } //if(fileType){ //TDT.calcTrioTDT(textData.chromosomes); //TODO: Deal with this. why do we calc TDT? and make sure not to do it except when appropriate //} } catch(IOException e){ System.err.println("An error has occured. This probably has to do with file input or output"); } catch(HaploViewException e){ System.err.println(e.getMessage()); } catch(PedFileException pfe) { System.err.println(pfe.getMessage()); } }
public static Vector calcTrioTDT(PedFile pf) throws PedFileException{
public static Vector calcTrioTDT(PedFile pf){
public static Vector calcTrioTDT(PedFile pf) throws PedFileException{ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); Vector indList = pf.getOrder(); Individual currentInd; Family currentFam; for (int j = 0; j < indList.size(); j++){ currentInd = (Individual)indList.elementAt(j); currentFam = pf.getFamily(currentInd.getFamilyID()); if (currentInd.hasBothParents() && currentInd.getAffectedStatus() == 2){ //if he has both parents, and is affected, we can get a transmission Individual mom = currentFam.getMember(currentInd.getMomID()); Individual dad = currentFam.getMember(currentInd.getDadID()); byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = dad.getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; thisMarker = mom.getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; byte momT=0, momU=0, dadT=0, dadU=0; if (kid1 == 0 || kid2 == 0 || dad1 == 0 || dad2 == 0 || mom1 == 0 || mom2 == 0) { continue; } else if (kid1 == kid2) { //kid homozygous if (dad1 == kid1) { dadT = dad1; dadU = dad2; } else { dadT = dad2; dadU = dad1; } if (mom1 == kid1) { momT = mom1; momU = mom2; } else { momT = mom2; momU = mom1; } } else { if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadT = dad1; dadU = dad2; if (kid1 == dad1) { momT = kid2; momU = kid1; } else { momT = kid1; momU = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momT = mom1; momU = mom2; if (kid1 == mom1) { dadT = kid2; dadU = kid1; } else { dadT = kid1; dadU = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadT = dad1; dadU = dad1; momT = mom1; momU = mom1; } else { //everybody het dadT = (byte)(4+dad1); dadU = (byte)(4+dad2); momT = (byte)(4+mom1); momU = (byte)(4+mom2); } } thisResult.tallyTrioInd(dadT, dadU); thisResult.tallyTrioInd(momT, momU); } } results.add(thisResult); } return results; }
Individual mom = currentFam.getMember(currentInd.getMomID()); Individual dad = currentFam.getMember(currentInd.getDadID());
Individual mom = null; Individual dad = null; try{ mom = currentFam.getMember(currentInd.getMomID()); dad = currentFam.getMember(currentInd.getDadID()); }catch (PedFileException pfe){ /* parent lookup failed; mom and dad remain null */ }
public static Vector calcTrioTDT(PedFile pf) throws PedFileException{ Vector results = new Vector(); int numMarkers = Chromosome.getUnfilteredSize(); for (int i = 0; i < numMarkers; i++){ TDTResult thisResult = new TDTResult(Chromosome.getUnfilteredMarker(i)); Vector indList = pf.getOrder(); Individual currentInd; Family currentFam; for (int j = 0; j < indList.size(); j++){ currentInd = (Individual)indList.elementAt(j); currentFam = pf.getFamily(currentInd.getFamilyID()); if (currentInd.hasBothParents() && currentInd.getAffectedStatus() == 2){ //if he has both parents, and is affected, we can get a transmission Individual mom = currentFam.getMember(currentInd.getMomID()); Individual dad = currentFam.getMember(currentInd.getDadID()); byte[] thisMarker = currentInd.getMarker(i); byte kid1 = thisMarker[0]; byte kid2 = thisMarker[1]; thisMarker = dad.getMarker(i); byte dad1 = thisMarker[0]; byte dad2 = thisMarker[1]; thisMarker = mom.getMarker(i); byte mom1 = thisMarker[0]; byte mom2 = thisMarker[1]; byte momT=0, momU=0, dadT=0, dadU=0; if (kid1 == 0 || kid2 == 0 || dad1 == 0 || dad2 == 0 || mom1 == 0 || mom2 == 0) { continue; } else if (kid1 == kid2) { //kid homozygous if (dad1 == kid1) { dadT = dad1; dadU = dad2; } else { dadT = dad2; dadU = dad1; } if (mom1 == kid1) { momT = mom1; momU = mom2; } else { momT = mom2; momU = mom1; } } else { if (dad1 == dad2 && mom1 != mom2) { //dad hom mom het dadT = dad1; dadU = dad2; if (kid1 == dad1) { momT = kid2; momU = kid1; } else { momT = kid1; momU = kid2; } } else if (mom1 == mom2 && dad1 != dad2) { //dad het mom hom momT = mom1; momU = mom2; if (kid1 == mom1) { dadT = kid2; dadU = kid1; } else { dadT = kid1; dadU = kid2; } } else if (dad1 == dad2 && mom1 == mom2) { //mom & dad hom dadT = dad1; dadU = dad1; momT = mom1; momU = mom1; } else { //everybody het dadT = (byte)(4+dad1); dadU = (byte)(4+dad2); momT = (byte)(4+mom1); momU = (byte)(4+mom2); } } thisResult.tallyTrioInd(dadT, dadU); thisResult.tallyTrioInd(momT, momU); } } results.add(thisResult); } return results; }
Object options[] = { "Create", "Exit" }; int retval = JOptionPane.showOptionDialog( null, "No known database exist.\nDo you want to create a new one?", "Photovault", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0] ); if ( retval == JOptionPane.YES_OPTION ) { DbSettingsDlg dlg = new DbSettingsDlg( null, true ); if ( dlg.showDialog() != dlg.APPROVE_OPTION ) { System.exit( 0 ); } } else {
DbSettingsDlg dlg = new DbSettingsDlg( null, true ); if ( dlg.showDialog() != dlg.APPROVE_OPTION ) {
void run() { PhotovaultSettings settings = PhotovaultSettings.getSettings(); Collection databases = settings.getDatabases(); if ( databases.size() == 0 ) { // No known database exists, so create new Object options[] = { "Create", "Exit" }; int retval = JOptionPane.showOptionDialog( null, "No known database exist.\nDo you want to create a new one?", "Photovault", JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, options[0] ); if ( retval == JOptionPane.YES_OPTION ) { DbSettingsDlg dlg = new DbSettingsDlg( null, true ); if ( dlg.showDialog() != dlg.APPROVE_OPTION ) { System.exit( 0 ); } } else { System.exit( 0 ); } } LoginDlg login = new LoginDlg( this ); boolean loginOK = false; while ( !loginOK ) { int retval = login.showDialog(); switch( retval ) { case LoginDlg.RETURN_REASON_CANCEL: System.exit( 0 ); break; case LoginDlg.RETURN_REASON_NEWDB: DbSettingsDlg dlg = new DbSettingsDlg( null, true ); if ( dlg.showDialog() == dlg.APPROVE_OPTION ) { login = new LoginDlg( this ); } break; case LoginDlg.RETURN_REASON_APPROVE: if ( login( login ) ) { loginOK = true; BrowserWindow wnd = new BrowserWindow(); } else { JOptionPane.showMessageDialog( null, "Error logging into Photovault", "Login error", JOptionPane.ERROR_MESSAGE ); } break; default: log.error( "Unknown return code form LoginDlg.showDialog(): " + retval ); break; } } }
textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile);
if(outputType == 3) { OutputFile = new File(fileName + ".SFSblocks"); textData.guessBlocks(0); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".4GAMblocks"); textData.guessBlocks(1); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); OutputFile = new File(fileName + ".MJDblocks"); textData.guessBlocks(2); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } else { textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); }
private void processFile(String fileName,boolean fileType,String infoFileName){ try { int outputType; long maxDistance; HaploData textData; File OutputFile; File inputFile; inputFile = new File(fileName); if(!inputFile.exists()){ System.out.println("input file: " + fileName + " does not exist"); System.exit(1); } maxDistance = this.arg_distance * 1000; outputType = this.arg_output; textData = new HaploData(); if(!fileType){ //read in haps file textData.prepareHapsInput(inputFile); } else { //read in ped file PedFile ped; Vector pedFileStrings; BufferedReader reader; String line; Vector result; boolean[] markerResultArray; ped = new PedFile(); pedFileStrings = new Vector(); reader = new BufferedReader(new FileReader(inputFile)); result = new Vector(); while((line = reader.readLine())!=null){ pedFileStrings.add(line); } ped.parse(pedFileStrings); if(!arg_skipCheck) { result = ped.check(); } if(this.arg_showCheck) { System.out.println("Data check results:\n" + "Name\t\tObsHET\tPredHET\tHWpval\t%Geno\tFamTrio\tMendErr"); for(int i=0;i<result.size();i++){ MarkerResult currentResult = (MarkerResult)result.get(i); System.out.println( currentResult.getName() +"\t"+ currentResult.getObsHet() +"\t"+ currentResult.getPredHet() +"\t"+ currentResult.getHWpvalue() +"\t"+ currentResult.getGenoPercent() +"\t"+ currentResult.getFamTrioNum() +"\t"+ currentResult.getMendErrNum()); } } markerResultArray = new boolean[ped.getNumMarkers()]; for (int i = 0; i < markerResultArray.length; i++){ if(this.arg_skipCheck) { markerResultArray[i] = true; } else if(((MarkerResult)result.get(i)).getRating() > 0) { markerResultArray[i] = true; } else { markerResultArray[i] = false; } } /* if(this.arg_ignoreMarkers.size()>0) { for(int i=0;i<this.arg_ignoreMarkers.size();i++){ int index = Integer.parseInt((String)this.arg_ignoreMarkers.get(i)); if(index>0 && index<markerResultArray.length){ markerResultArray[index] = false; if(!this.arg_quiet) { System.out.println("Ignoring marker " + (index)); } } } }*/ textData.linkageToChrom(markerResultArray,ped); } String name = fileName; String baseName = fileName.substring(0,name.length()-5); if(!infoFileName.equals("")) { File infoFile = new File(infoFileName); if(infoFile.exists()) { textData.prepareMarkerInput(infoFile,maxDistance); System.out.println("Using marker file " + infoFile.getName()); } else if(!this.arg_quiet) { System.out.println("info file " + infoFileName + " does not exist"); } } else { File maybeInfo = new File(baseName + ".info"); if (maybeInfo.exists()){ textData.prepareMarkerInput(maybeInfo,maxDistance); if(!arg_quiet){ System.out.println("Using marker file " + maybeInfo.getName()); } } } textData.generateDPrimeTable(maxDistance); Haplotype[][] haplos; if(outputType != -1){ switch(outputType){ case 0: OutputFile = new File(fileName + ".SFSblocks"); break; case 1: OutputFile = new File(fileName + ".4GAMblocks"); break; case 2: OutputFile = new File(fileName + ".MJDblocks"); break; //case 3: //TODO: need to do all three here // break; default: OutputFile = new File(fileName + ".SFSblocks"); break; } textData.guessBlocks(outputType); haplos = textData.generateHaplotypes(textData.blocks, 1); textData.saveHapsToText(orderHaps(haplos, textData), textData.getMultiDprime(), OutputFile); } if(this.arg_dprime) { OutputFile = new File(fileName + ".DPRIME"); textData.saveDprimeToText(textData.filteredDPrimeTable,OutputFile,false,null); } if(fileType){ TDT myTDT = new TDT(); myTDT.calcTDT(textData.chromosomes); } } catch(IOException e){} catch(HaploViewException e){ 
System.out.println(e.getMessage()); } }
printQualified(t.getPhysicalName());
print( DDLUtils.toQualifiedName(t.getCatalogName(),t.getSchemaName(),t.getPhysicalName()) );
public void modifyColumn(SQLColumn c) throws ArchitectDiffException { Map colNameMap = new HashMap(); SQLTable t = c.getParentTable(); print("\n ALTER TABLE "); printQualified(t.getPhysicalName()); print(" MODIFY "); print(columnDefinition(c,colNameMap)); endStatement(DDLStatement.StatementType.MODIFY, c); }
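The replacement line above relies on DDLUtils.toQualifiedName, whose body is not included in these records. A minimal sketch of what such a helper plausibly does (an illustrative assumption, not the actual Architect implementation) is a dot-joined catalog.schema.name that skips null or empty parts:

public class QualifiedNameSketch {
    // Hypothetical stand-in for DDLUtils.toQualifiedName.
    static String toQualifiedName(String catalog, String schema, String name) {
        StringBuilder sb = new StringBuilder();
        for (String part : new String[] { catalog, schema, name }) {
            if (part != null && part.length() > 0) {
                if (sb.length() > 0) {
                    sb.append('.');
                }
                sb.append(part);
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(toQualifiedName(null, "public", "customer")); // prints public.customer
    }
}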
log.debug( "fireselectionChangedEvent " + selected );
protected void fireSelectionChangeEvent() { Iterator iter = folderTreeListeners.iterator(); while ( iter.hasNext() ) { PhotoFolderTreeListener l = (PhotoFolderTreeListener) iter.next(); l.photoFolderTreeSelectionChanged( new PhotoFolderTreeEvent( this, selected ) ); } }
f.setDescription( "Changed name to " + newName );
log.debug( "Changed name to " + newName );
void renameSelectedFolder() { if ( selected != null ) { String origName = selected.getName(); String newName = (String) JOptionPane.showInputDialog( this, "Enter new name", "Rename folder", JOptionPane.PLAIN_MESSAGE, null, null, origName ); if ( newName != null ) { PhotoFolder f = selected; f.setName( newName ); f.setDescription( "Changed name to " + newName ); } } }
throw new JellyException( "This tag does not understand the attribute '" + name + "'", getColumnNumber(), getLineNumber()
throw createJellyException( "This tag does not understand the attribute '" + name + "'"
public Script compile() throws Exception { if (tag instanceof CompilableTag) { ((CompilableTag) tag).compile(); } List typeList = new ArrayList(); List methodList = new ArrayList(); List expressionList = new ArrayList(); BeanInfo info = Introspector.getBeanInfo(tag.getClass()); PropertyDescriptor[] descriptors = info.getPropertyDescriptors(); Set attributeSet = new HashSet(); if (descriptors != null) { for (int i = 0, size = descriptors.length; i < size; i++) { PropertyDescriptor descriptor = descriptors[i]; String name = descriptor.getName(); Expression expression = (Expression) attributes.get(name); if (expression != null) { attributeSet.add( name ); Method writeMethod = descriptor.getWriteMethod(); if (writeMethod != null) { Class type = descriptor.getPropertyType(); expressionList.add(expression); methodList.add(writeMethod); typeList.add(type); if (log.isDebugEnabled()) { log.debug( "Adding tag property name: " + name + " type: " + type.getName() + " expression: " + expression); } } } } } // System.err.println( "BeanTagScript::compile() " + this ); // now create the arrays to avoid object allocation & casting when // running the script int size = expressionList.size(); expressions = new Expression[size]; methods = new Method[size]; types = new Class[size]; expressionList.toArray(expressions); methodList.toArray(methods); typeList.toArray(types); // compile body tag.setBody(tag.getBody().compile()); // now lets check for any attributes that are not used for ( Iterator iter = attributes.keySet().iterator(); iter.hasNext(); ) { String name = (String) iter.next(); if ( ! attributeSet.contains( name ) ) { throw new JellyException( "This tag does not understand the attribute '" + name + "'", getColumnNumber(), getLineNumber() ); } } return this; }
throw new JellyException(
throw createJellyException(
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, getColumnNumber(), getLineNumber() ); } } try { tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); } }
+ valueTypeName + ". Exception: " + e, getColumnNumber(), getLineNumber()
+ valueTypeName + ". Exception: " + e, e
public void run(JellyContext context, XMLOutput output) throws Exception { tag.setContext(context); // initialize all the properties of the tag before its used // if there is a problem abort this tag for (int i = 0, size = expressions.length; i < size; i++) { Expression expression = expressions[i]; Method method = methods[i]; Class type = types[i]; // some types are Expression objects so let the tag // evaluate them Object value = null; if (type.isAssignableFrom(Expression.class) && !type.isAssignableFrom(Object.class)) { value = expression; } else { value = expression.evaluate(context); } // convert value to correct type if (value != null) { value = convertType(value, type); } Object[] arguments = { value }; try { method.invoke(tag, arguments); } catch (Exception e) { String valueTypeName = (value != null ) ? value.getClass().getName() : "null"; log.warn( "Cannot call method: " + method.getName() + " as I cannot convert: " + value + " of type: " + valueTypeName + " into type: " + type.getName() ); throw new JellyException( "Cannot call method: " + method.getName() + " on tag of type: " + tag.getClass().getName() + " with value: " + value + " of type: " + valueTypeName + ". Exception: " + e, getColumnNumber(), getLineNumber() ); } } try { tag.doTag(output); } catch (JellyException e) { handleException(e); } catch (Exception e) { handleException(e); } }
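Several of the changes above replace direct JellyException construction with a createJellyException helper that is not shown in these records. A plausible sketch of such helpers (an assumption about the tag-script class, not the actual commons-jelly source; the (String, Throwable) constructor used below is also assumed) would fold the tag position into the message so call sites stay short:

// Sketch only: methods assumed to live on the same class that already
// provides getLineNumber() and getColumnNumber().
protected JellyException createJellyException(String reason) {
    return new JellyException(reason + " (line " + getLineNumber() + ", column " + getColumnNumber() + ")");
}

protected JellyException createJellyException(String reason, Exception cause) {
    return new JellyException(reason + " (line " + getLineNumber() + ", column " + getColumnNumber() + ")", cause);
}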
JFrame jf = new JFrame();
final JFrame jf = new JFrame(); jf.setSize(400, 600);
private JFrame createMenuTest(int max) throws HeadlessException { JFrame jf = new JFrame(); final JMenuBar jb = new JMenuBar(); jf.setJMenuBar(jb); fileMenu = new JMenu("File"); for (int i = 0; i <= max; i++) { fileMenu.add(new JMenuItem(Integer.toString(i))); } jf.setSize(400, 600); jb.add(fileMenu); ASUtils.breakLongMenu(jf, fileMenu); jf.setVisible(true); return jf; }
fileMenu.add(new JMenuItem(Integer.toString(i)));
if ( i==10 || i == 9 ) { final JMenuItem jmi = new JMenuItem(Integer.toString(i)); fileMenu.add(jmi); jmi.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { System.out.println("jf.bounds="+jf.getBounds()); System.out.println("jmi:"+jmi.getBounds()); } }); } else { fileMenu.add(new JMenuItem(Integer.toString(i))); }
private JFrame createMenuTest(int max) throws HeadlessException { JFrame jf = new JFrame(); final JMenuBar jb = new JMenuBar(); jf.setJMenuBar(jb); fileMenu = new JMenu("File"); for (int i = 0; i <= max; i++) { fileMenu.add(new JMenuItem(Integer.toString(i))); } jf.setSize(400, 600); jb.add(fileMenu); ASUtils.breakLongMenu(jf, fileMenu); jf.setVisible(true); return jf; }
jf.setSize(400, 600);
private JFrame createMenuTest(int max) throws HeadlessException { JFrame jf = new JFrame(); final JMenuBar jb = new JMenuBar(); jf.setJMenuBar(jb); fileMenu = new JMenu("File"); for (int i = 0; i <= max; i++) { fileMenu.add(new JMenuItem(Integer.toString(i))); } jf.setSize(400, 600); jb.add(fileMenu); ASUtils.breakLongMenu(jf, fileMenu); jf.setVisible(true); return jf; }
for (int i = 69; i <= 71; i++)
for (int i = 48; i <= 51; i++)
public static void main(String[] args) { for (int i = 69; i <= 71; i++) new TestASUtilsMenu().createMenuTest(i); }
final JDialog d = ArchitectPanelBuilder.createArchitectPanelDialog(
d = ArchitectPanelBuilder.createArchitectPanelDialog(
private void makeDialog(SQLTable table) { final TableEditPanel editPanel = new TableEditPanel(table); Action okAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { editPanel.applyChanges(); // XXX: also apply changes on mapping tab d.setVisible(false); } }; Action cancelAction = new AbstractAction() { public void actionPerformed(ActionEvent evt) { editPanel.discardChanges(); // XXX: also discard changes on mapping tab d.setVisible(false); } }; final JDialog d = ArchitectPanelBuilder.createArchitectPanelDialog( editPanel, ArchitectFrame.getMainInstance(), "Table Properties", "OK", okAction, cancelAction); d.pack(); d.setLocationRelativeTo(ArchitectFrame.getMainInstance()); d.setVisible(true); }
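The change above drops the local "final JDialog d" declaration and assigns to what is presumably an instance field instead. A standalone sketch of why that matters (class and member names assumed, not the project's code): the anonymous Action bodies refer to d, but a local variable declared further down the method is not visible inside them, whereas a field is.

public class DialogFieldSketch {
    private javax.swing.JDialog d;

    void makeDialog() {
        javax.swing.Action okAction = new javax.swing.AbstractAction() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                d.setVisible(false); // legal because d is an instance field
            }
        };
        d = new javax.swing.JDialog();
        d.getContentPane().add(new javax.swing.JButton(okAction));
        d.pack();
        d.setVisible(true);
    }
}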
try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { log.warn( "Caught exception setting nested name: " + tagName, e ); }
public void doTag(XMLOutput output) throws JellyTagException { Project project = getAntProject(); String tagName = getTagName(); Object parentObject = findBeanAncestor(); Object parentTask = findParentTaskObject(); // lets assume that Task instances are not nested inside other Task instances // for example <manifest> inside a <jar> should be a nested object, where as // if the parent is not a Task the <manifest> should create a ManifestTask // // also its possible to have a root Ant tag which isn't a task, such as when // defining <fileset id="...">...</fileset> Object nested = null; if (parentObject != null && !( parentTask instanceof TaskContainer) ) { nested = createNestedObject( parentObject, tagName ); } if (nested == null) { task = createTask( tagName ); if (task != null) { if ( log.isDebugEnabled() ) { log.debug( "Creating an ant Task for name: " + tagName ); } // the following algorithm follows the lifetime of a tag // http://jakarta.apache.org/ant/manual/develop.html#writingowntask // kindly recommended by Stefan Bodewig // create and set its project reference if ( task instanceof TaskAdapter ) { setObject( ((TaskAdapter)task).getProxy() ); } else { setObject( task ); } // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, task ); } // ### we might want to spoof a Target setting here // now lets initialize task.init(); // now lets invoke the body to call all the createXXX() or addXXX() methods String body = getBodyText(); // now lets set any attributes of this tag... setBeanProperties(); // now lets set the addText() of the body content, if its applicaable Method method = MethodUtils.getAccessibleMethod( task.getClass(), "addText", addTaskParamTypes ); if (method != null) { Object[] args = { body }; try { method.invoke(this.task, args); } catch (IllegalAccessException e) { throw new JellyTagException(e); } catch (InvocationTargetException e) { throw new JellyTagException(e); } } // now lets set all the attributes of the child elements // XXXX: to do! // now we're ready to invoke the task // XXX: should we call execute() or perform()? task.perform(); } } if (task == null) { if (nested == null) { if ( log.isDebugEnabled() ) { log.debug( "Trying to create a data type for tag: " + tagName ); } nested = createDataType( tagName ); } else { if ( log.isDebugEnabled() ) { log.debug( "Created nested property tag: " + tagName ); } } if ( nested != null ) { setObject( nested ); // set the task ID if one is given Object id = getAttributes().remove( "id" ); if ( id != null ) { project.addReference( (String) id, nested ); } // TODO: work out why we always set the name attribute. // See JELLY-105. try{ PropertyUtils.setProperty( nested, "name", tagName ); } catch (Exception e) { log.warn( "Caught exception setting nested name: " + tagName, e ); } // now lets invoke the body String body = getBodyText(); // now lets set any attributes of this tag... 
setBeanProperties(); // now lets add it to its parent if ( parentObject != null ) { IntrospectionHelper ih = IntrospectionHelper.getHelper( parentObject.getClass() ); try { if (log.isDebugEnabled()) { log.debug("About to set the: " + tagName + " property on: " + parentObject + " to value: " + nested + " with type: " + nested.getClass() ); } ih.storeElement( project, parentObject, nested, tagName.toLowerCase() ); } catch (Exception e) { log.warn( "Caught exception setting nested: " + tagName, e ); } // now try to set the property for good measure // as the storeElement() method does not // seem to call any setter methods of non-String types try { BeanUtils.setProperty( parentObject, tagName, nested ); } catch (Exception e) { log.debug("Caught exception trying to set property: " + tagName + " on: " + parentObject); } } } else { log.warn("Could not convert tag: " + tagName + " into an Ant task, data type or property"); // lets treat this tag as static XML... StaticTag tag = new StaticTag("", tagName, tagName); tag.setParent( getParent() ); tag.setBody( getBody() ); tag.setContext(context); for (Iterator iter = getAttributes().entrySet().iterator(); iter.hasNext();) { Map.Entry entry = (Map.Entry) iter.next(); String name = (String) entry.getKey(); Object value = entry.getValue(); tag.setAttribute(name, value); } tag.doTag(output); } } }
counts[0][1]++; counts[1][0]++;
public void tallyInd(byte alleleT, byte alleleU) { if(alleleT == 5 && alleleU == 5) { if(tallyHet){ counts[0][0]++; counts[1][1]++; this.tallyHet = false; } else { counts[0][1]++; counts[1][0]++; this.tallyHet = true; } } else if( (alleleT != alleleU) && (alleleT!=0) && (alleleU!=0) ) { if(allele1==0 && allele2==0 ) { allele1 = alleleT; allele2 = alleleU; } if(alleleT == allele1) { counts[0][0]++; } else if(alleleT==allele2) { counts[0][1]++; } if(alleleU == allele1){ counts[1][0]++; } else if(alleleU == allele2) { counts[1][1]++; } } //System.out.println( counts[0][0] + "\t" + counts[1][1] + "\t" + counts[0][1] + "\t" + counts[1][0]); }
println(out, "<profiles>");
ProfileManager profmgr = getProfileManager(); println(out, "<profiles topNCount=\""+profmgr.getTopNCount()+"\">");
private void saveProfiles(PrintWriter out) { println(out, "<profiles>"); indent++; ProfileManager profmgr = getProfileManager(); Map<SQLObject, ProfileResult> results = profmgr.getResults(); for (Map.Entry<SQLObject, ProfileResult> e : results.entrySet()) { SQLObject so = e.getKey(); ProfileResult profileResult = e.getValue(); print(out, "<profile-result ref-id=\""+objectIdMap.get(so)+"\"" + " type=\"" + profileResult.getClass().getName() + "\"" + " createStartTime=\""+profileResult.getCreateStartTime()+"\"" + " createEndTime=\""+profileResult.getCreateEndTime()+"\"" + " error=\""+profileResult.isError()+"\""); if (profileResult.getException() != null) { niprint(out, " exception-type=\""+ArchitectUtils.escapeXML(profileResult.getException().getClass().getName())+"\""); niprint(out, " exception-message=\""+ArchitectUtils.escapeXML(profileResult.getException().getMessage())+"\""); } if (profileResult instanceof TableProfileResult) { TableProfileResult tpr = (TableProfileResult) profileResult; print(out, " rowCount=\""+tpr.getRowCount()+"\""); niprintln(out, "/>"); } else if (profileResult instanceof ColumnProfileResult) { ColumnProfileResult cpr = (ColumnProfileResult) profileResult; niprint(out, " avgLength=\"" + cpr.getAvgLength() + "\""); niprint(out, " avgValue=\"" + cpr.getAvgValue() + "\""); niprint(out, " distinctValueCount=\"" + cpr.getDistinctValueCount() + "\""); niprint(out, " maxValue=\"" + ArchitectUtils.escapeXML(String.valueOf(cpr.getMaxValue())) + "\""); niprint(out, " minValue=\"" + ArchitectUtils.escapeXML(String.valueOf(cpr.getMinValue())) + "\""); niprint(out, " minLength=\"" + cpr.getMinLength() + "\""); niprint(out, " maxLength=\"" + cpr.getMaxLength() + "\""); niprint(out, " nullCount=\"" + cpr.getNullCount() + "\""); niprintln(out, ">"); indent++; List<ColumnValueCount> valueCount = cpr.getValueCount(); if (valueCount != null) { for (ColumnValueCount count : valueCount) { println(out, "<value occurrences=\""+count.getCount()+"\">"+ArchitectUtils.escapeXML(String.valueOf(count.getValue()))+"</value>"); } } indent--; println(out, "</profile-result>"); } else { String message = "Unknown ProfileResult Subclass: " + profileResult.getClass().getName(); niprintln(out, "/> <!-- " + message + "-->"); logger.error(message); } } println(out, "</profiles>"); indent--; }
ProfileManager profmgr = getProfileManager();
private void saveProfiles(PrintWriter out) { println(out, "<profiles>"); indent++; ProfileManager profmgr = getProfileManager(); Map<SQLObject, ProfileResult> results = profmgr.getResults(); for (Map.Entry<SQLObject, ProfileResult> e : results.entrySet()) { SQLObject so = e.getKey(); ProfileResult profileResult = e.getValue(); print(out, "<profile-result ref-id=\""+objectIdMap.get(so)+"\"" + " type=\"" + profileResult.getClass().getName() + "\"" + " createStartTime=\""+profileResult.getCreateStartTime()+"\"" + " createEndTime=\""+profileResult.getCreateEndTime()+"\"" + " error=\""+profileResult.isError()+"\""); if (profileResult.getException() != null) { niprint(out, " exception-type=\""+ArchitectUtils.escapeXML(profileResult.getException().getClass().getName())+"\""); niprint(out, " exception-message=\""+ArchitectUtils.escapeXML(profileResult.getException().getMessage())+"\""); } if (profileResult instanceof TableProfileResult) { TableProfileResult tpr = (TableProfileResult) profileResult; print(out, " rowCount=\""+tpr.getRowCount()+"\""); niprintln(out, "/>"); } else if (profileResult instanceof ColumnProfileResult) { ColumnProfileResult cpr = (ColumnProfileResult) profileResult; niprint(out, " avgLength=\"" + cpr.getAvgLength() + "\""); niprint(out, " avgValue=\"" + cpr.getAvgValue() + "\""); niprint(out, " distinctValueCount=\"" + cpr.getDistinctValueCount() + "\""); niprint(out, " maxValue=\"" + ArchitectUtils.escapeXML(String.valueOf(cpr.getMaxValue())) + "\""); niprint(out, " minValue=\"" + ArchitectUtils.escapeXML(String.valueOf(cpr.getMinValue())) + "\""); niprint(out, " minLength=\"" + cpr.getMinLength() + "\""); niprint(out, " maxLength=\"" + cpr.getMaxLength() + "\""); niprint(out, " nullCount=\"" + cpr.getNullCount() + "\""); niprintln(out, ">"); indent++; List<ColumnValueCount> valueCount = cpr.getValueCount(); if (valueCount != null) { for (ColumnValueCount count : valueCount) { println(out, "<value occurrences=\""+count.getCount()+"\">"+ArchitectUtils.escapeXML(String.valueOf(count.getValue()))+"</value>"); } } indent--; println(out, "</profile-result>"); } else { String message = "Unknown ProfileResult Subclass: " + profileResult.getClass().getName(); niprintln(out, "/> <!-- " + message + "-->"); logger.error(message); } } println(out, "</profiles>"); indent--; }
d.addSetProperties("*/profiles");
private Digester setupDigester() { Digester d = new Digester(); d.setValidating(false); d.push(this); // project name d.addCallMethod("architect-project/project-name", "setName", 0); // argument is element body text // source DB connection specs (deprecated in favour of project-data-sources) DBCSFactory dbcsFactory = new DBCSFactory(); d.addFactoryCreate("architect-project/project-connection-specs/dbcs", dbcsFactory); d.addSetProperties ("architect-project/project-connection-specs/dbcs", new String[] {"connection-name", "driver-class", "jdbc-url", "user-name", "user-pass", "sequence-number", "single-login"}, new String[] {"displayName", "driverClass", "url", "user", "pass", "seqNo", "singleLogin"}); d.addCallMethod("architect-project/project-connection-specs/dbcs", "setName", 0); // these instances get picked out of the dbcsIdMap by the SQLDatabase factory // project data sources (replaces project connection specs) d.addFactoryCreate("architect-project/project-data-sources/data-source", dbcsFactory); d.addCallMethod("architect-project/project-data-sources/data-source/property", "put", 2); d.addCallParam("architect-project/project-data-sources/data-source/property", 0, "key"); d.addCallParam("architect-project/project-data-sources/data-source/property", 1, "value"); //d.addSetNext("architect-project/project-data-sources/data-source", ); // source database hierarchy d.addObjectCreate("architect-project/source-databases", LinkedList.class); d.addSetNext("architect-project/source-databases", "setSourceDatabaseList"); SQLDatabaseFactory dbFactory = new SQLDatabaseFactory(); d.addFactoryCreate("architect-project/source-databases/database", dbFactory); d.addSetProperties("architect-project/source-databases/database"); d.addSetNext("architect-project/source-databases/database", "add"); d.addObjectCreate("architect-project/source-databases/database/catalog", SQLCatalog.class); d.addSetProperties("architect-project/source-databases/database/catalog"); d.addSetNext("architect-project/source-databases/database/catalog", "addChild"); SQLSchemaFactory schemaFactory = new SQLSchemaFactory(); d.addFactoryCreate("*/schema", schemaFactory); d.addSetProperties("*/schema"); d.addSetNext("*/schema", "addChild"); SQLTableFactory tableFactory = new SQLTableFactory(); d.addFactoryCreate("*/table", tableFactory); d.addSetProperties("*/table"); d.addSetNext("*/table", "addChild"); SQLFolderFactory folderFactory = new SQLFolderFactory(); d.addFactoryCreate("*/folder", folderFactory); d.addSetProperties("*/folder"); d.addSetNext("*/folder", "addChild"); SQLColumnFactory columnFactory = new SQLColumnFactory(); d.addFactoryCreate("*/column", columnFactory); d.addSetProperties("*/column"); // this needs to be manually set last to prevent generic types // from overwriting database specific types // Old name (it has been updated to sourceDataTypeName) d.addCallMethod("*/column","setSourceDataTypeName",1); d.addCallParam("*/column",0,"sourceDBTypeName"); // new name d.addCallMethod("*/column","setSourceDataTypeName",1); d.addCallParam("*/column",0,"sourceDataTypeName"); d.addSetNext("*/column", "addChild"); SQLRelationshipFactory relationshipFactory = new SQLRelationshipFactory(); d.addFactoryCreate("*/relationship", relationshipFactory); d.addSetProperties("*/relationship"); // the factory adds the relationships to the correct PK and FK tables ColumnMappingFactory columnMappingFactory = new ColumnMappingFactory(); d.addFactoryCreate("*/column-mapping", columnMappingFactory); d.addSetProperties("*/column-mapping"); 
d.addSetNext("*/column-mapping", "addChild"); SQLExceptionFactory exceptionFactory = new SQLExceptionFactory(); d.addFactoryCreate("*/sql-exception", exceptionFactory); d.addSetProperties("*/sql-exception"); d.addSetNext("*/sql-exception", "addChild"); TargetDBFactory targetDBFactory = new TargetDBFactory(); // target database hierarchy d.addFactoryCreate("architect-project/target-database", targetDBFactory); d.addSetProperties("architect-project/target-database"); // the play pen TablePaneFactory tablePaneFactory = new TablePaneFactory(); d.addFactoryCreate("architect-project/play-pen/table-pane", tablePaneFactory); // factory will add the tablepanes to the playpen PPRelationshipFactory ppRelationshipFactory = new PPRelationshipFactory(); d.addFactoryCreate("architect-project/play-pen/table-link", ppRelationshipFactory); DDLGeneratorFactory ddlgFactory = new DDLGeneratorFactory(); d.addFactoryCreate("architect-project/ddl-generator", ddlgFactory); d.addSetProperties("architect-project/ddl-generator"); CompareDMSettingFactory settingFactory = new CompareDMSettingFactory(); d.addFactoryCreate("architect-project/compare-dm-settings", settingFactory); d.addSetProperties("architect-project/compare-dm-settings"); CompareDMStuffSettingFactory sourceStuffFactory = new CompareDMStuffSettingFactory(true); d.addFactoryCreate("architect-project/compare-dm-settings/source-stuff", sourceStuffFactory); d.addSetProperties("architect-project/compare-dm-settings/source-stuff"); CompareDMStuffSettingFactory targetStuffFactory = new CompareDMStuffSettingFactory(false); d.addFactoryCreate("architect-project/compare-dm-settings/target-stuff", targetStuffFactory); d.addSetProperties("architect-project/compare-dm-settings/target-stuff"); ProfileManagerFactory profileManagerFactory = new ProfileManagerFactory(); d.addFactoryCreate("*/profiles", profileManagerFactory); ProfileResultFactory profileResultFactory = new ProfileResultFactory(); d.addFactoryCreate("*/profiles/profile-result", profileResultFactory); d.addSetProperties("*/profiles/profile-result"); d.addSetNext("*/profiles/profile-result", "putResult"); FileFactory fileFactory = new FileFactory(); d.addFactoryCreate("*/file", fileFactory); d.addSetNext("*/file", "setFile"); d.addSetNext("architect-project/ddl-generator", "setDDLGenerator"); return d; }
filteredDPrimeTable = getFilteredTable();
void prepareMarkerInput(File infile, long maxdist) throws IOException, HaploViewException{ //this method is called to gather data about the markers used. //It is assumed that the input file is two columns, the first being //the name and the second the absolute position. the maxdist is //used to determine beyond what distance comparisons will not be //made. if the infile param is null, loads up "dummy info" for //situation where no info file exists if (infile != null){ if (infile.length() < 1){ throw new HaploViewException("Info file is empty or does not exist: " + infile.getName()); } String currentLine; Vector markers = new Vector(); long negMaxdist = -1 * maxdist; //read the input file: BufferedReader in = new BufferedReader(new FileReader(infile)); // a vector of SNP's is created and returned. int snpcount = 0; int lineCount = 0; while ((currentLine = in.readLine()) != null){ StringTokenizer st = new StringTokenizer(currentLine); if (st.countTokens() > 1){ lineCount++; }else if (st.countTokens() == 1){ //complain if only one field found throw new HaploViewException("Info file format error on line "+lineCount+ ":\n Info file must be of format: <markername> <markerposition>"); }else{ //skip blank lines continue; } if (lineCount > Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nToo many markers")); } //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int i = 0; i < chromosomes.size(); i++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(i)).getGenotype(snpcount); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; String name = st.nextToken(); String loc = st.nextToken(); try{ markers.add(new SNP(name, Long.parseLong(loc), infile.getName(), maf)); }catch (NumberFormatException nfe){ throw new HaploViewException("Info file format error on line "+lineCount+ ":\n\"" + loc + "\" should be of type long." 
+ "\n Info file must be of format: <markername> <markerposition>"); } snpcount ++; } if (lineCount < Chromosome.markers.length){ throw(new HaploViewException("Info file error:\nNot enough markers")); } Chromosome.markers = markers.toArray(); if (dPrimeTable != null){ //loop through the dprime table to null-out distant markers for (int pos2 = 1; pos2 < dPrimeTable.length; pos2++){ for (int pos1 = 0; pos1 < pos2; pos1++){ long sep = Chromosome.getMarker(pos1).getPosition() - Chromosome.getMarker(pos2).getPosition(); if (maxdist > 0){ if ((sep > maxdist || sep < negMaxdist)){ dPrimeTable[pos1][pos2] = null; continue; } } } } } infoKnown=true; }else{ double numChroms = chromosomes.size(); Vector markerInfo = new Vector(); numBadGenotypes = new double[Chromosome.getSize()]; percentBadGenotypes = new double[Chromosome.getSize()]; for (int i = 0; i < Chromosome.getSize(); i++){ //to compute maf, browse chrom list and count instances of each allele byte a1 = 0; double numa1 = 0; double numa2 = 0; for (int j = 0; j < chromosomes.size(); j++){ //if there is a data point for this marker on this chromosome byte thisAllele = ((Chromosome)chromosomes.elementAt(j)).getGenotype(i); if (!(thisAllele == 0)){ if (thisAllele == 5){ numa1+=0.5; numa2+=0.5; }else if (a1 == 0){ a1 = thisAllele; numa1++; }else if (thisAllele == a1){ numa1++; }else{ numa2++; } } else { numBadGenotypes[i]++; } } double maf = numa1/(numa2+numa1); if (maf > 0.5) maf = 1.0-maf; markerInfo.add(new SNP(String.valueOf(i+1), (i*4000), maf)); percentBadGenotypes[i] = numBadGenotypes[i]/numChroms; } Chromosome.markers = markerInfo.toArray(); } }
db.addVolume( v );
try { db.addVolume( v ); } catch (PhotovaultException ex) { fail( ex.getMessage() ); }
public void testIndexing() { int n; ExternalVolume v = new ExternalVolume( "extVol", extVolDir.getAbsolutePath() ); PhotovaultSettings settings = PhotovaultSettings.getSettings(); PVDatabase db = settings.getDatabase( "pv_junit" ); db.addVolume( v ); ExtVolIndexer indexer = new ExtVolIndexer( v ); indexer.setTopFolder( topFolder ); TestListener l = new TestListener(); indexer.addIndexerListener( l ); assertEquals( "Indexing not started -> completeness must be 0", 0, indexer.getPercentComplete() ); assertNull( "StartTime must be null before starting", indexer.getStartTime() ); indexer.run(); if ( ODMG.getODMGImplementation().currentTransaction() != null ) { fail( "Still in transaction" ); } // Check that all the files can be found PhotoInfo[] photos1 = PhotoInfo.retrieveByOrigHash( hash1 ); if ( ODMG.getODMGImplementation().currentTransaction() != null ) { fail( "Still in transaction" ); } assertEquals( "Only 1 photo per picture should be found", 1, photos1.length ); PhotoInfo p1 = photos1[0]; assertEquals( "2 instances should be found in photo 1", 2, p1.getNumInstances() ); PhotoInfo[] photos2 = PhotoInfo.retrieveByOrigHash( hash2 ); if ( ODMG.getODMGImplementation().currentTransaction() != null ) { fail( "Still in transaction" ); } assertEquals( "1 photo per picture should be found", 1, photos2.length ); PhotoInfo p2 = photos2[0]; assertEquals( "3 instances should be found in photo 2", 3, p2.getNumInstances() );//// ImageInstance i1 = p1.getInstance( 0 );// assertEquals( i1.getImageFile(), photo1 ); // CHeck that both instrances of p2 can be found boolean found[] = {false, false}; File files[] = {photo2inst1, photo2inst2}; for ( n = 0; n < p2.getNumInstances(); n++ ) { ImageInstance i = p2.getInstance( n ); for ( int m = 0; m < found.length; m++ ) { if ( files[m].equals( i.getImageFile() ) ) { found[m] = true; } } } for ( n = 0; n < found.length; n++ ) { assertTrue( "Photo " + n + " not found", found[n] ); } if ( ODMG.getODMGImplementation().currentTransaction() != null ) { fail( "Still in transaction" ); } // Check that the folders have the correct photos PhotoInfo[] photosInTopFolder = { p1, p2 }; assertFolderHasPhotos( topFolder, photosInTopFolder ); PhotoFolder subFolder = topFolder.getSubfolder( 0 ); assertEquals( "Subfolder name not correct", "test", subFolder.getName() ); PhotoInfo[] photosInSubFolder = { p2 }; assertFolderHasPhotos( subFolder, photosInSubFolder ); // Check that the listener was called correctly assertEquals( "Wrong photo count in listener", 2, l.photoCount ); assertEquals( "Wrong photo count in indexer statistics", 2, indexer.getNewPhotoCount() ); assertEquals( "Wrong instance count in listener", 3, l.instanceCount ); assertEquals( "Wrong instance count in indexer statistics", 3, indexer.getNewInstanceCount() ); assertEquals( "Indexing complete 100%", 100, indexer.getPercentComplete() ); assertNotNull( "StartTime still null", indexer.getStartTime() ); if ( ODMG.getODMGImplementation().currentTransaction() != null ) { fail( "Still in transaction" ); } // Next, let's make some modifications to the external volume try { // New file File testfile3 = new File( "testfiles", "test3.jpg" ); File f3 = new File( extVolDir, "test3.jpg"); FileUtils.copyFile( testfile3, f3 ); // Replace the test1 file with test3 File f1 = new File ( extVolDir, "test1.jpg" ); FileUtils.copyFile( testfile3, f1 ); // Remove 1 copy of test2 File f2 = new File( extVolDir, "test2.jpg" ); f2.delete(); } catch (IOException ex) { fail( "IOException while altering external volume: " + ex.getMessage() ); 
} indexer = new ExtVolIndexer( v ); indexer.setTopFolder( topFolder ); l = new TestListener(); indexer.addIndexerListener( l ); assertEquals( "Indexing not started -> completeness must be 0", 0, indexer.getPercentComplete() ); assertNull( "StartTime must be null before starting", indexer.getStartTime() ); indexer.run(); // Check that the folders have the correct photos PhotoInfo[] photos3 = PhotoInfo.retrieveByOrigHash( hash3 ); assertEquals( "1 photo per picture should be found", 1, photos3.length ); PhotoInfo p3 = photos3[0]; PhotoInfo photosInTopFolder2[] = { p3 }; assertFolderHasPhotos( topFolder, photosInTopFolder2 ); assertEquals( "More than 1 subfolder in topFolder", 1, topFolder.getSubfolderCount() ); subFolder = topFolder.getSubfolder( 0 ); assertEquals( "Subfolder name not correct", "test", subFolder.getName() ); PhotoInfo[] photosInSubFolder2 = { p2 }; assertFolderHasPhotos( subFolder, photosInSubFolder2 ); Collection p2folders = p2.getFolders(); assertFalse( "p2 must not be in topFolder", p2folders.contains( topFolder ) ); }
return new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression( text ) );
int length = text.length(); if ( length > 3 && text.startsWith( "${" ) && text.charAt( length - 1 ) == '}' ) { text = text.substring( 2, length - 1 ); return new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression( text ) ); } return null;
public Expression createExpression(String text) throws Exception { return new JexlExpression( org.apache.commons.jexl.ExpressionFactory.createExpression( text ) ); }